// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of dart.ui;
/// A composable [SceneNode].
base class SceneNode extends NativeFieldWrapperClass1 {
@pragma('vm:entry-point')
SceneNode._create() {
_constructor();
}
String? _debugName;
/// Creates a scene node from the asset with key [assetKey].
///
/// The asset must be a file produced as the output of the `scenec` importer.
/// The constructed object should then be reused via the [sceneShader]
/// method to create [Shader] objects that can be used by [Paint.shader].
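///
/// A minimal usage sketch (the asset name below is illustrative, not an
/// asset shipped with this file):
///
/// ```dart
/// final SceneNodeValue node = SceneNode.fromAsset('assets/cube.ipscene');
/// node.whenComplete((SceneNode sceneNode) {
///   final SceneShader shader = sceneNode.sceneShader();
///   // The shader can then be assigned to a Paint via Paint.shader.
/// });
/// ```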
static SceneNodeValue fromAsset(String assetKey) {
// The flutter tool converts all asset keys with spaces into URI
// encoded paths (replacing ' ' with '%20', for example). We perform
// the same encoding here so that users can load assets with the same
// key they have written in the pubspec.
final String encodedKey = Uri(path: Uri.encodeFull(assetKey)).path;
{
final SceneNodeValue? futureSceneNode = _ipsceneRegistry[encodedKey]?.target;
if (futureSceneNode != null) {
return futureSceneNode;
}
}
final SceneNode sceneNode = SceneNode._create();
final Future<SceneNode> futureSceneNode = _futurize((_Callback<void> callback) {
final String error = sceneNode._initFromAsset(assetKey, callback);
if (error.isNotEmpty) {
return error;
}
assert(() {
sceneNode._debugName = assetKey;
return true;
}());
return null;
}).then((_) => sceneNode);
final SceneNodeValue result = SceneNodeValue.fromFuture(futureSceneNode);
_ipsceneRegistry[encodedKey] = WeakReference<SceneNodeValue>(result);
return result;
}
static SceneNodeValue fromTransform(Float64List matrix4) {
final SceneNode sceneNode = SceneNode._create();
sceneNode._initFromTransform(matrix4);
return SceneNodeValue.fromValue(sceneNode);
}
void addChild(SceneNode sceneNode) {
_addChild(sceneNode);
}
void setTransform(Float64List matrix4) {
_setTransform(matrix4);
}
void setAnimationState(String animationName, bool playing, bool loop, double weight, double timeScale) {
_setAnimationState(animationName, playing, loop, weight, timeScale);
}
void seekAnimation(String animationName, double time) {
_seekAnimation(animationName, time);
}
// This is a cache of ipscene-backed scene nodes that have been loaded via
// SceneNode.fromAsset. It holds weak references to the SceneNodes so that the
// case where an in-use ipscene is requested again can be fast, but scenes
// that are no longer referenced are not retained because of the cache.
static final Map<String, WeakReference<SceneNodeValue>> _ipsceneRegistry =
<String, WeakReference<SceneNodeValue>>{};
static Future<void> _reinitializeScene(String assetKey) async {
final WeakReference<SceneNodeValue>? sceneRef = _ipsceneRegistry[assetKey];
// If a scene for the asset isn't already registered, then there's no
// need to reinitialize it.
if (sceneRef == null) {
return;
}
final Future<SceneNode>? sceneNodeFuture = sceneRef.target?.future;
if (sceneNodeFuture == null) {
return;
}
final SceneNode sceneNode = await sceneNodeFuture;
await _futurize((_Callback<void> callback) {
final String error = sceneNode._initFromAsset(assetKey, callback);
if (error.isNotEmpty) {
return error;
}
return null;
});
}
@Native<Void Function(Handle)>(symbol: 'SceneNode::Create')
external void _constructor();
@Native<Handle Function(Pointer<Void>, Handle, Handle)>(symbol: 'SceneNode::initFromAsset')
external String _initFromAsset(String assetKey, _Callback<void> completionCallback);
@Native<Void Function(Pointer<Void>, Handle)>(symbol: 'SceneNode::initFromTransform')
external void _initFromTransform(Float64List matrix4);
@Native<Void Function(Pointer<Void>, Handle)>(symbol: 'SceneNode::AddChild')
external void _addChild(SceneNode sceneNode);
@Native<Void Function(Pointer<Void>, Handle)>(symbol: 'SceneNode::SetTransform')
external void _setTransform(Float64List matrix4);
@Native<Void Function(Pointer<Void>, Handle, Bool, Bool, Double, Double)>(symbol: 'SceneNode::SetAnimationState')
external void _setAnimationState(String animationName, bool playing, bool loop, double weight, double timeScale);
@Native<Void Function(Pointer<Void>, Handle, Double)>(symbol: 'SceneNode::SeekAnimation')
external void _seekAnimation(String animationName, double time);
/// Returns a fresh instance of [SceneShader].
SceneShader sceneShader() => SceneShader._(this, debugName: _debugName);
}
class SceneNodeValue {
SceneNodeValue._(this._future, this._value) {
_future?.then((SceneNode result) => _value = result);
}
static SceneNodeValue fromFuture(Future<SceneNode> future) {
return SceneNodeValue._(future, null);
}
static SceneNodeValue fromValue(SceneNode value) {
return SceneNodeValue._(null, value);
}
final Future<SceneNode>? _future;
SceneNode? _value;
bool get isComplete {
return _value != null;
}
Future<SceneNode>? get future {
return _future;
}
SceneNode? get value {
return _value;
}
/// Calls `callback` when the `SceneNode` has finished initializing. If the
/// initialization is already finished, `callback` is called synchronously.
SceneNodeValue whenComplete(void Function(SceneNode) callback) {
if (_value == null && _future == null) {
return this;
}
if (_value != null) {
callback(_value!);
return this;
}
// _future != null
_future!.then((SceneNode node) => callback(node));
return this;
}
}
/// A [Shader] generated from a [SceneNode].
///
/// Instances of this class can be obtained from the
/// [SceneNode.sceneShader] method.
base class SceneShader extends Shader {
SceneShader._(SceneNode node, { String? debugName }) : _debugName = debugName, super._() {
_constructor(node);
}
// ignore: unused_field
final String? _debugName;
void setCameraTransform(Float64List matrix4) {
_setCameraTransform(matrix4);
}
/// Releases the native resources held by the [SceneShader].
///
/// After this method is called, calling methods on the shader, or attaching
/// it to a [Paint] object will fail with an exception. Calling [dispose]
/// twice will also result in an exception being thrown.
@override
void dispose() {
super.dispose();
_dispose();
}
@Native<Void Function(Handle, Handle)>(symbol: 'SceneShader::Create')
external void _constructor(SceneNode node);
@Native<Void Function(Pointer<Void>, Handle)>(symbol: 'SceneShader::SetCameraTransform')
external void _setCameraTransform(Float64List matrix4);
@Native<Void Function(Pointer<Void>)>(symbol: 'SceneShader::Dispose')
external void _dispose();
}
| engine/lib/ui/experiments/scene.dart/0 | {
"file_path": "engine/lib/ui/experiments/scene.dart",
"repo_id": "engine",
"token_count": 2239
} | 271 |
#version 320 es
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
precision highp float;
layout(location = 0) out vec4 color;
layout(location = 0) uniform sampler2D child1;
layout(location = 1) uniform float a;
layout(location = 2) uniform sampler2D child2;
layout(location = 3) uniform float b;
void main() {
// child1 is a 10x10 image where the left half is blue and the right
// half is green, and b should be 1, so c1 should be vec4(0, 1, 0, 1)
vec4 c1 = texture(child1, vec2(b, 0));
// child2 only contains vec4(0, 1, 0, 1).
vec4 c2 = texture(child2, vec2(0));
color = c1 * c2;
}
| engine/lib/ui/fixtures/shaders/general_shaders/children_and_uniforms.frag/0 | {
"file_path": "engine/lib/ui/fixtures/shaders/general_shaders/children_and_uniforms.frag",
"repo_id": "engine",
"token_count": 242
} | 272 |
#version 320 es
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
precision highp float;
layout(location = 0) out vec4 fragColor;
layout(location = 0) uniform float a;
void main() {
fragColor = vec4(
// length of a zero vector is 0.0
length(vec3(a - 1.0, 0.0, 0.0)),
// sqrt(3.0^2.0 + 4.0^2.0) - 4.0 = 5.0 - 4.0 = 1.0
length(vec2(a * 3.0, 4.0)) - 4.0, 0.0,
// sqrt(4.0^2.0 + (-4.0)^2.0 + (-4.0)^2.0 + 4.0^2.0) - 7.0 = sqrt(16.0
// + 16.0 + 16.0 + 16.0) - 7.0 = sqrt(64.0) - 7.0 = 8.0 - 7.0 = 1.0
length(vec4(a * 4.0, -4.0, -4.0, 4.0)) - 7.0);
}
| engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/66_length.frag/0 | {
"file_path": "engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/66_length.frag",
"repo_id": "engine",
"token_count": 352
} | 273 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of dart.ui;
/// Static methods to allow for simple sharing of [SendPort]s across [Isolate]s.
///
/// All isolates share a global mapping of names to ports. An isolate can
/// register a [SendPort] with a given name using [registerPortWithName];
/// another isolate can then look up that port using [lookupPortByName].
///
/// To create a [SendPort], first create a [ReceivePort], then use
/// [ReceivePort.sendPort].
///
/// Since multiple isolates can each obtain the same [SendPort] associated with
/// a particular [ReceivePort], the protocol built on top of this mechanism
/// should typically consist of a single message. If more elaborate two-way
/// communication or multiple-message communication is necessary, it is
/// recommended to establish a separate communication channel in that first
/// message (e.g. by passing a dedicated [SendPort]).
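///
/// A minimal sketch of sharing a port between isolates (the port name below
/// is illustrative):
///
/// ```dart
/// // In the isolate that owns the ReceivePort:
/// final ReceivePort receivePort = ReceivePort();
/// IsolateNameServer.registerPortWithName(receivePort.sendPort, 'my_port');
///
/// // In any other isolate:
/// final SendPort? port = IsolateNameServer.lookupPortByName('my_port');
/// port?.send('hello');
/// ```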
abstract final class IsolateNameServer {
/// Looks up the [SendPort] associated with a given name.
///
/// Returns null if the name does not exist. To register the name in the first
/// place, consider [registerPortWithName].
///
/// The `name` argument must not be null.
static SendPort? lookupPortByName(String name) {
return _lookupPortByName(name);
}
/// Registers a [SendPort] with a given name.
///
/// Returns true if registration is successful, and false if the name entry
/// already existed (in which case the earlier registration is left
/// unchanged). To remove a registration, consider [removePortNameMapping].
///
/// Once a port has been registered with a name, it can be obtained from any
/// [Isolate] using [lookupPortByName].
///
/// Multiple isolates should avoid attempting to register ports with the same
/// name, as there is an inherent race condition in doing so.
///
/// The `port` and `name` arguments must not be null.
static bool registerPortWithName(SendPort port, String name) {
return _registerPortWithName(port, name);
}
/// Removes a name-to-[SendPort] mapping given its name.
///
/// Returns true if the mapping was successfully removed, false if the mapping
/// did not exist. To add a registration, consider [registerPortWithName].
///
/// Generally, removing a port name mapping is an inherently racy operation
/// (another isolate could have obtained the name just prior to the name being
/// removed, and thus would still be able to communicate over the port even
/// after it has been removed).
///
/// The `name` argument must not be null.
static bool removePortNameMapping(String name) {
return _removePortNameMapping(name);
}
@Native<Handle Function(Handle)>(symbol: 'IsolateNameServerNatives::LookupPortByName')
external static SendPort? _lookupPortByName(String name);
@Native<Bool Function(Handle, Handle)>(symbol: 'IsolateNameServerNatives::RegisterPortWithName')
external static bool _registerPortWithName(SendPort port, String name);
@Native<Bool Function(Handle)>(symbol: 'IsolateNameServerNatives::RemovePortNameMapping')
external static bool _removePortNameMapping(String name);
}
| engine/lib/ui/isolate_name_server.dart/0 | {
"file_path": "engine/lib/ui/isolate_name_server.dart",
"repo_id": "engine",
"token_count": 856
} | 274 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/lib/ui/painting/display_list_deferred_image_gpu_impeller.h"
#include <utility>
#include "flutter/fml/make_copyable.h"
namespace flutter {
sk_sp<DlDeferredImageGPUImpeller> DlDeferredImageGPUImpeller::Make(
std::unique_ptr<LayerTree> layer_tree,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
fml::RefPtr<fml::TaskRunner> raster_task_runner) {
return sk_sp<DlDeferredImageGPUImpeller>(new DlDeferredImageGPUImpeller(
DlDeferredImageGPUImpeller::ImageWrapper::Make(
std::move(layer_tree), std::move(snapshot_delegate),
std::move(raster_task_runner))));
}
sk_sp<DlDeferredImageGPUImpeller> DlDeferredImageGPUImpeller::Make(
sk_sp<DisplayList> display_list,
const SkISize& size,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
fml::RefPtr<fml::TaskRunner> raster_task_runner) {
return sk_sp<DlDeferredImageGPUImpeller>(new DlDeferredImageGPUImpeller(
DlDeferredImageGPUImpeller::ImageWrapper::Make(
std::move(display_list), size, std::move(snapshot_delegate),
std::move(raster_task_runner))));
}
DlDeferredImageGPUImpeller::DlDeferredImageGPUImpeller(
std::shared_ptr<ImageWrapper> wrapper)
: wrapper_(std::move(wrapper)) {}
// |DlImage|
DlDeferredImageGPUImpeller::~DlDeferredImageGPUImpeller() = default;
// |DlImage|
sk_sp<SkImage> DlDeferredImageGPUImpeller::skia_image() const {
return nullptr;
};
// |DlImage|
std::shared_ptr<impeller::Texture>
DlDeferredImageGPUImpeller::impeller_texture() const {
if (!wrapper_) {
return nullptr;
}
return wrapper_->texture();
}
// |DlImage|
bool DlDeferredImageGPUImpeller::isOpaque() const {
// Impeller doesn't currently implement opaque alpha types.
return false;
}
// |DlImage|
bool DlDeferredImageGPUImpeller::isTextureBacked() const {
return wrapper_ && wrapper_->isTextureBacked();
}
// |DlImage|
bool DlDeferredImageGPUImpeller::isUIThreadSafe() const {
return true;
}
// |DlImage|
SkISize DlDeferredImageGPUImpeller::dimensions() const {
if (!wrapper_) {
return SkISize::MakeEmpty();
}
return wrapper_->size();
}
// |DlImage|
size_t DlDeferredImageGPUImpeller::GetApproximateByteSize() const {
auto size = sizeof(DlDeferredImageGPUImpeller);
if (wrapper_) {
if (wrapper_->texture()) {
size += wrapper_->texture()
->GetTextureDescriptor()
.GetByteSizeOfBaseMipLevel();
} else {
size += wrapper_->size().width() * wrapper_->size().height() * 4;
}
}
return size;
}
std::shared_ptr<DlDeferredImageGPUImpeller::ImageWrapper>
DlDeferredImageGPUImpeller::ImageWrapper::Make(
sk_sp<DisplayList> display_list,
const SkISize& size,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
fml::RefPtr<fml::TaskRunner> raster_task_runner) {
auto wrapper = std::shared_ptr<ImageWrapper>(new ImageWrapper(
std::move(display_list), size, std::move(snapshot_delegate),
std::move(raster_task_runner)));
wrapper->SnapshotDisplayList();
return wrapper;
}
std::shared_ptr<DlDeferredImageGPUImpeller::ImageWrapper>
DlDeferredImageGPUImpeller::ImageWrapper::Make(
std::unique_ptr<LayerTree> layer_tree,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
fml::RefPtr<fml::TaskRunner> raster_task_runner) {
auto wrapper = std::shared_ptr<ImageWrapper>(new ImageWrapper(
nullptr, layer_tree->frame_size(), std::move(snapshot_delegate),
std::move(raster_task_runner)));
wrapper->SnapshotDisplayList(std::move(layer_tree));
return wrapper;
}
DlDeferredImageGPUImpeller::ImageWrapper::ImageWrapper(
sk_sp<DisplayList> display_list,
const SkISize& size,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
fml::RefPtr<fml::TaskRunner> raster_task_runner)
: size_(size),
display_list_(std::move(display_list)),
snapshot_delegate_(std::move(snapshot_delegate)),
raster_task_runner_(std::move(raster_task_runner)) {}
DlDeferredImageGPUImpeller::ImageWrapper::~ImageWrapper() {
fml::TaskRunner::RunNowOrPostTask(
raster_task_runner_, [id = reinterpret_cast<uintptr_t>(this),
texture_registry = std::move(texture_registry_)]() {
if (texture_registry) {
texture_registry->UnregisterContextListener(id);
}
});
}
void DlDeferredImageGPUImpeller::ImageWrapper::OnGrContextCreated() {
FML_DCHECK(raster_task_runner_->RunsTasksOnCurrentThread());
SnapshotDisplayList();
}
void DlDeferredImageGPUImpeller::ImageWrapper::OnGrContextDestroyed() {
// Impeller textures do not have threading requirements for deletion, so it
// can simply be released here.
texture_.reset();
}
bool DlDeferredImageGPUImpeller::ImageWrapper::isTextureBacked() const {
return texture_ && texture_->IsValid();
}
void DlDeferredImageGPUImpeller::ImageWrapper::SnapshotDisplayList(
std::unique_ptr<LayerTree> layer_tree) {
fml::TaskRunner::RunNowOrPostTask(
raster_task_runner_,
fml::MakeCopyable([weak_this = weak_from_this(),
layer_tree = std::move(layer_tree)]() {
TRACE_EVENT0("flutter", "SnapshotDisplayList (impeller)");
auto wrapper = weak_this.lock();
if (!wrapper) {
return;
}
auto snapshot_delegate = wrapper->snapshot_delegate_;
if (!snapshot_delegate) {
return;
}
wrapper->texture_registry_ = snapshot_delegate->GetTextureRegistry();
wrapper->texture_registry_->RegisterContextListener(
reinterpret_cast<uintptr_t>(wrapper.get()), weak_this);
if (layer_tree) {
wrapper->display_list_ = layer_tree->Flatten(
SkRect::MakeWH(wrapper->size_.width(), wrapper->size_.height()),
wrapper->texture_registry_);
}
auto snapshot = snapshot_delegate->MakeRasterSnapshot(
wrapper->display_list_, wrapper->size_);
if (!snapshot) {
std::scoped_lock lock(wrapper->error_mutex_);
wrapper->error_ = "Failed to create snapshot.";
return;
}
wrapper->texture_ = snapshot->impeller_texture();
}));
}
std::optional<std::string>
DlDeferredImageGPUImpeller::ImageWrapper::get_error() {
std::scoped_lock lock(error_mutex_);
return error_;
}
} // namespace flutter
| engine/lib/ui/painting/display_list_deferred_image_gpu_impeller.cc/0 | {
"file_path": "engine/lib/ui/painting/display_list_deferred_image_gpu_impeller.cc",
"repo_id": "engine",
"token_count": 2597
} | 275 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/lib/ui/painting/image_decoder.h"
#include "flutter/lib/ui/painting/image_decoder_skia.h"
#if IMPELLER_SUPPORTS_RENDERING
#include "flutter/lib/ui/painting/image_decoder_impeller.h"
#endif // IMPELLER_SUPPORTS_RENDERING
namespace flutter {
std::unique_ptr<ImageDecoder> ImageDecoder::Make(
const Settings& settings,
const TaskRunners& runners,
std::shared_ptr<fml::ConcurrentTaskRunner> concurrent_task_runner,
fml::WeakPtr<IOManager> io_manager,
const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch) {
#if IMPELLER_SUPPORTS_RENDERING
if (settings.enable_impeller) {
return std::make_unique<ImageDecoderImpeller>(
runners, //
std::move(concurrent_task_runner), //
std::move(io_manager), //
settings.enable_wide_gamut, //
gpu_disabled_switch);
}
#endif // IMPELLER_SUPPORTS_RENDERING
return std::make_unique<ImageDecoderSkia>(
runners, //
std::move(concurrent_task_runner), //
std::move(io_manager) //
);
}
ImageDecoder::ImageDecoder(
const TaskRunners& runners,
std::shared_ptr<fml::ConcurrentTaskRunner> concurrent_task_runner,
fml::WeakPtr<IOManager> io_manager)
: runners_(runners),
concurrent_task_runner_(std::move(concurrent_task_runner)),
io_manager_(std::move(io_manager)),
weak_factory_(this) {
FML_DCHECK(runners_.IsValid());
FML_DCHECK(runners_.GetUITaskRunner()->RunsTasksOnCurrentThread())
<< "The image decoder must be created & collected on the UI thread.";
}
ImageDecoder::~ImageDecoder() = default;
fml::WeakPtr<ImageDecoder> ImageDecoder::GetWeakPtr() const {
return weak_factory_.GetWeakPtr();
}
} // namespace flutter
| engine/lib/ui/painting/image_decoder.cc/0 | {
"file_path": "engine/lib/ui/painting/image_decoder.cc",
"repo_id": "engine",
"token_count": 803
} | 276 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_IMPL_H_
#define FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_IMPL_H_
#include "flutter/lib/ui/ui_dart_state.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
#include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h"
namespace flutter {
template <typename SyncSwitch>
sk_sp<SkImage> ConvertToRasterUsingResourceContext(
const sk_sp<SkImage>& image,
const fml::WeakPtr<GrDirectContext>& resource_context,
const std::shared_ptr<const SyncSwitch>& is_gpu_disabled_sync_switch) {
sk_sp<SkSurface> surface;
SkImageInfo surface_info = SkImageInfo::MakeN32Premul(image->dimensions());
is_gpu_disabled_sync_switch->Execute(
typename SyncSwitch::Handlers()
.SetIfTrue([&surface, &surface_info] {
surface = SkSurfaces::Raster(surface_info);
})
.SetIfFalse([&surface, &surface_info, resource_context] {
if (resource_context) {
surface = SkSurfaces::RenderTarget(
resource_context.get(), skgpu::Budgeted::kNo, surface_info);
} else {
surface = SkSurfaces::Raster(surface_info);
}
}));
if (surface == nullptr || surface->getCanvas() == nullptr) {
FML_LOG(ERROR) << "Could not create a surface to copy the texture into.";
return nullptr;
}
surface->getCanvas()->drawImage(image, 0, 0);
if (resource_context) {
resource_context->flushAndSubmit();
}
auto snapshot = surface->makeImageSnapshot();
if (snapshot == nullptr) {
FML_LOG(ERROR) << "Could not snapshot image to encode.";
return nullptr;
}
return snapshot->makeRasterImage();
}
} // namespace flutter
#endif // FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_IMPL_H_
| engine/lib/ui/painting/image_encoding_impl.h/0 | {
"file_path": "engine/lib/ui/painting/image_encoding_impl.h",
"repo_id": "engine",
"token_count": 825
} | 277 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_PAINTING_IMMUTABLE_BUFFER_H_
#define FLUTTER_LIB_UI_PAINTING_IMMUTABLE_BUFFER_H_
#include <cstdint>
#include "flutter/fml/macros.h"
#include "flutter/lib/ui/dart_wrapper.h"
#include "third_party/skia/include/core/SkData.h"
#include "third_party/tonic/dart_library_natives.h"
#include "third_party/tonic/logging/dart_invoke.h"
#include "third_party/tonic/typed_data/typed_list.h"
namespace flutter {
//------------------------------------------------------------------------------
/// A simple opaque handle to an immutable byte buffer suitable for use
/// internally by the engine.
///
/// This data is not known by the Dart VM.
///
/// It is expected that C++ users of this object will not modify the data
/// argument. No Dart side calls are provided to do so.
class ImmutableBuffer : public RefCountedDartWrappable<ImmutableBuffer> {
public:
~ImmutableBuffer() override;
/// Initializes a new ImmutableBuffer from a Dart Uint8List.
///
/// `buffer_handle` is the caller that will be registered as the Dart peer of
/// the native ImmutableBuffer object.
///
/// `data` is a tonic::Uint8List of bytes to copy.
///
/// `callback_handle` is expected to be a void callback to signal when the
/// copy has completed.
static Dart_Handle init(Dart_Handle buffer_handle,
Dart_Handle data,
Dart_Handle callback_handle);
/// Initializes a new ImmutableBuffer from an asset matching a provided
/// asset string.
///
/// The zero indexed argument is the caller that will be registered as the
/// Dart peer of the native ImmutableBuffer object.
///
/// The first indexed argument is a String corresponding to the asset
/// to load.
///
/// The second indexed argument is expected to be a void callback to signal
/// when the copy has completed.
static Dart_Handle initFromAsset(Dart_Handle buffer_handle,
Dart_Handle asset_name_handle,
Dart_Handle callback_handle);
/// Initializes a new ImmutableBuffer from a file path.
///
/// The zero indexed argument is the caller that will be registered as the
/// Dart peer of the native ImmutableBuffer object.
///
/// The first indexed argument is a String corresponding to the file path
/// to load.
///
/// The second indexed argument is expected to be a void callback to signal
/// when the copy has completed.
static Dart_Handle initFromFile(Dart_Handle buffer_handle,
Dart_Handle file_path_handle,
Dart_Handle callback_handle);
/// The length of the data in bytes.
size_t length() const {
FML_DCHECK(data_);
return data_->size();
}
/// Callers should not modify the returned data. This is not exposed to Dart.
sk_sp<SkData> data() const { return data_; }
/// Clears the Dart native fields and removes the reference to the underlying
/// byte buffer.
///
/// The byte buffer will continue to live if other objects hold a reference to
/// it.
void dispose() {
data_.reset();
ClearDartWrapper();
}
private:
explicit ImmutableBuffer(sk_sp<SkData> data) : data_(std::move(data)) {}
sk_sp<SkData> data_;
static sk_sp<SkData> MakeSkDataWithCopy(const void* data, size_t length);
DEFINE_WRAPPERTYPEINFO();
FML_FRIEND_MAKE_REF_COUNTED(ImmutableBuffer);
FML_DISALLOW_COPY_AND_ASSIGN(ImmutableBuffer);
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_PAINTING_IMMUTABLE_BUFFER_H_
| engine/lib/ui/painting/immutable_buffer.h/0 | {
"file_path": "engine/lib/ui/painting/immutable_buffer.h",
"repo_id": "engine",
"token_count": 1243
} | 278 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_PAINTING_PICTURE_RECORDER_H_
#define FLUTTER_LIB_UI_PAINTING_PICTURE_RECORDER_H_
#include "flutter/display_list/dl_builder.h"
#include "flutter/lib/ui/dart_wrapper.h"
namespace flutter {
class Canvas;
class Picture;
class PictureRecorder : public RefCountedDartWrappable<PictureRecorder> {
DEFINE_WRAPPERTYPEINFO();
FML_FRIEND_MAKE_REF_COUNTED(PictureRecorder);
public:
static void Create(Dart_Handle wrapper);
~PictureRecorder() override;
sk_sp<DisplayListBuilder> BeginRecording(SkRect bounds);
void endRecording(Dart_Handle dart_picture);
void set_canvas(fml::RefPtr<Canvas> canvas) { canvas_ = std::move(canvas); }
private:
PictureRecorder();
sk_sp<DisplayListBuilder> display_list_builder_;
fml::RefPtr<Canvas> canvas_;
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_PAINTING_PICTURE_RECORDER_H_
| engine/lib/ui/painting/picture_recorder.h/0 | {
"file_path": "engine/lib/ui/painting/picture_recorder.h",
"repo_id": "engine",
"token_count": 365
} | 279 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of dart.ui;
/// A wrapper for a raw callback handle.
///
/// This is the return type for [PluginUtilities.getCallbackHandle].
class CallbackHandle {
/// Create an instance using a raw callback handle.
///
/// Only values produced by a call to [CallbackHandle.toRawHandle] should be
/// used, otherwise this object will be an invalid handle.
CallbackHandle.fromRawHandle(this._handle);
final int _handle;
/// Get the raw callback handle to pass over a [MethodChannel] or [SendPort]
/// (to pass to another [Isolate]).
int toRawHandle() => _handle;
@override
bool operator ==(Object other) {
if (runtimeType != other.runtimeType) {
return false;
}
return other is CallbackHandle
&& other._handle == _handle;
}
@override
int get hashCode => _handle.hashCode;
}
/// Functionality for Flutter plugin authors.
///
/// See also:
///
/// * [IsolateNameServer], which provides utilities for dealing with
/// [Isolate]s.
abstract final class PluginUtilities {
static final Map<Function, CallbackHandle?> _forwardCache =
<Function, CallbackHandle?>{};
static final Map<CallbackHandle, Function?> _backwardCache =
<CallbackHandle, Function?>{};
/// Get a handle to a named top-level or static callback function which can
/// be easily passed between isolates.
///
/// The `callback` argument must not be null.
///
/// Returns a [CallbackHandle] that can be provided to
/// [PluginUtilities.getCallbackFromHandle] to retrieve a tear-off of the
/// original callback. If `callback` is not a top-level or static function,
/// null is returned.
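///
/// A minimal sketch of round-tripping a handle (the callback name below is
/// illustrative):
///
/// ```dart
/// void myTopLevelCallback() {}
///
/// final CallbackHandle? handle =
///     PluginUtilities.getCallbackHandle(myTopLevelCallback);
/// final Function? callback =
///     handle == null ? null : PluginUtilities.getCallbackFromHandle(handle);
/// ```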
static CallbackHandle? getCallbackHandle(Function callback) {
return _forwardCache.putIfAbsent(callback, () {
final int? handle = _getCallbackHandle(callback);
return handle != null ? CallbackHandle.fromRawHandle(handle) : null;
});
}
/// Get a tear-off of a named top-level or static callback represented by a
/// handle.
///
/// The `handle` argument must not be null.
///
/// If `handle` is not a valid handle returned by
/// [PluginUtilities.getCallbackHandle], null is returned. Otherwise, a
/// tear-off of the callback associated with `handle` is returned.
static Function? getCallbackFromHandle(CallbackHandle handle) {
return _backwardCache.putIfAbsent(
handle, () => _getCallbackFromHandle(handle.toRawHandle()));
}
}
| engine/lib/ui/plugins.dart/0 | {
"file_path": "engine/lib/ui/plugins.dart",
"repo_id": "engine",
"token_count": 759
} | 280 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of dart.ui;
@pragma('vm:entry-point')
void _setupHooks() {
assert(() {
// In debug mode, register the schedule frame extension.
developer.registerExtension('ext.ui.window.scheduleFrame', _scheduleFrame);
// In debug mode, allow shaders to be reinitialized.
developer.registerExtension(
'ext.ui.window.reinitializeShader',
_reinitializeShader,
);
return true;
}());
// In debug and profile mode, allow tools to display the current rendering backend.
if (!_kReleaseMode) {
developer.registerExtension(
'ext.ui.window.impellerEnabled',
_getImpellerEnabled,
);
}
}
| engine/lib/ui/setup_hooks.dart/0 | {
"file_path": "engine/lib/ui/setup_hooks.dart",
"repo_id": "engine",
"token_count": 270
} | 281 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_VOLATILE_PATH_TRACKER_H_
#define FLUTTER_LIB_UI_VOLATILE_PATH_TRACKER_H_
#include <deque>
#include <memory>
#include <mutex>
#include <vector>
#include "flutter/fml/macros.h"
#include "flutter/fml/task_runner.h"
#include "flutter/fml/trace_event.h"
#include "third_party/skia/include/core/SkPath.h"
namespace flutter {
namespace testing {
class ShellTest;
} // namespace testing
/// A cache for paths drawn from dart:ui.
///
/// Whenever a flutter::CanvasPath is created, it must Insert an entry into
/// this cache. Whenever a frame is drawn, the shell must call OnFrame. The
/// cache will flip the volatility bit on the SkPath and remove it from the
/// cache. If the Dart object is released, Erase must be called to avoid
/// tracking a path that is no longer referenced in Dart code.
///
/// Enabling this cache may cause difficult to predict minor pixel differences
/// when paths are rendered. If deterministic rendering is needed, e.g. for a
/// screen diffing test, this class will not cache any paths and will
/// automatically set the volatility of the path to false.
class VolatilePathTracker {
public:
/// The fields of this struct must only be accessed on the UI task runner.
struct TrackedPath {
bool tracking_volatility = false;
int frame_count = 0;
SkPath path;
};
VolatilePathTracker(fml::RefPtr<fml::TaskRunner> ui_task_runner,
bool enabled);
static constexpr int kFramesOfVolatility = 2;
// Starts tracking a path.
// Must be called from the UI task runner.
//
// Callers should only insert paths that are currently volatile.
void Track(const std::shared_ptr<TrackedPath>& path);
// Called by the shell at the end of a frame after notifying Dart about idle
// time.
//
// This method will flip the volatility bit to false for any paths that have
// survived the |kFramesOfVolatility|.
//
// Must be called from the UI task runner.
void OnFrame();
bool enabled() const { return enabled_; }
private:
fml::RefPtr<fml::TaskRunner> ui_task_runner_;
std::vector<std::weak_ptr<TrackedPath>> paths_;
bool enabled_ = true;
friend class testing::ShellTest;
FML_DISALLOW_COPY_AND_ASSIGN(VolatilePathTracker);
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_VOLATILE_PATH_TRACKER_H_
| engine/lib/ui/volatile_path_tracker.h/0 | {
"file_path": "engine/lib/ui/volatile_path_tracker.h",
"repo_id": "engine",
"token_count": 767
} | 282 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_H_
#define FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_H_
#include "flutter/fml/message_loop.h"
#include "flutter/lib/ui/window/platform_message_response.h"
#include "third_party/tonic/dart_persistent_value.h"
namespace flutter {
class PlatformMessageResponseDart : public PlatformMessageResponse {
FML_FRIEND_MAKE_REF_COUNTED(PlatformMessageResponseDart);
public:
// Callable on any thread.
void Complete(std::unique_ptr<fml::Mapping> data) override;
void CompleteEmpty() override;
protected:
explicit PlatformMessageResponseDart(
tonic::DartPersistentValue callback,
fml::RefPtr<fml::TaskRunner> ui_task_runner,
const std::string& channel);
~PlatformMessageResponseDart() override;
tonic::DartPersistentValue callback_;
fml::RefPtr<fml::TaskRunner> ui_task_runner_;
const std::string channel_;
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_H_
| engine/lib/ui/window/platform_message_response_dart.h/0 | {
"file_path": "engine/lib/ui/window/platform_message_response_dart.h",
"repo_id": "engine",
"token_count": 417
} | 283 |
# Web-specific coding conventions and terminology
Here you will find various naming and structural conventions used in the Web
engine code. This is not a code style guide. For code style refer to
[Flutter's style guide][1]. This document does not apply outside the `web_ui`
directory.
## CanvasKit Renderer
All code specific to the CanvasKit renderer lives in `lib/src/engine/canvaskit`.
CanvasKit bindings should use the exact names defined in CanvasKit's JavaScript
API, even if it violates Flutter's style guide, such as function names that
start with a capital letter (e.g. "MakeSkVertices"). This makes it easier to find
the relevant code in Skia's source code. CanvasKit bindings should all go in
the `canvaskit_api.dart` file.
Files and directories should use all-lower-case "canvaskit", without
capitalization or punctuation (such as "canvasKit", "canvas-kit", "canvas_kit").
This is consistent with Skia's conventions.
Variable, function, method, and class names should use camel case, i.e.
"canvasKit", "CanvasKit".
In documentation (doc comments, flutter.dev website, markdown files,
blog posts, etc) refer to Flutter's usage of CanvasKit as "CanvasKit renderer"
(to avoid confusion with CanvasKit as the standalone library, which can be used
without Flutter).
Classes that wrap CanvasKit classes should replace the `Sk` class prefix with
`Ck` (which stands for "CanvasKit"), e.g. `CkPaint` wraps `SkPaint`, `CkImage`
wraps `SkImage`.
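For example, a minimal illustrative wrapper (a sketch of the naming rule, not
actual engine code) might look like this:

```dart
// The `Sk` prefix of CanvasKit's `SkPaint` becomes `Ck` in the wrapper name.
class CkPaint {
  // Holds and manages the underlying CanvasKit (JavaScript) `SkPaint` object.
}
```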
## HTML Renderer
All code specific to the HTML renderer lives in `lib/src/engine/html`.
In documentation (doc comments, flutter.dev website, markdown files,
blog posts, etc) refer to Flutter's HTML implementation as "HTML renderer". We
include SVG, CSS, and Canvas 2D under the "HTML" umbrella.
The implementation of the layer system uses the term "surface" to refer to
layers. We rely on persisting the DOM information across frames to gain
efficiency. Each concrete implementation of the `Surface` class should start
with the prefix `Persisted`, e.g. `PersistedOpacity`, `PersistedPicture`.
## Semantics
The semantics (accessibility) code is shared between CanvasKit and HTML. All
semantics code lives in `lib/src/engine/semantics`.
## Text editing
Text editing code is shared between CanvasKit and HTML, and it lives in
`lib/src/engine/text_editing`.
## Common utilities
Small common utilities do not need dedicated directories. It is OK to put all
such utilities in `lib/src/engine` (see, for example, `alarm_clock.dart`).
[1]: https://github.com/flutter/flutter/wiki/Style-guide-for-Flutter-repo
| engine/lib/web_ui/CODE_CONVENTIONS.md/0 | {
"file_path": "engine/lib/web_ui/CODE_CONVENTIONS.md",
"repo_id": "engine",
"token_count": 714
} | 284 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:io';
import 'package:test_api/backend.dart';
import 'browser.dart';
import 'browser_process.dart';
import 'common.dart';
import 'edge_installation.dart';
import 'package_lock.dart';
/// Provides an environment for the desktop Microsoft Edge (Chromium-based).
class EdgeEnvironment implements BrowserEnvironment {
@override
final String name = 'Edge';
@override
Future<Browser> launchBrowserInstance(Uri url, {bool debug = false}) async {
return Edge(url);
}
@override
Runtime get packageTestRuntime => Runtime.edge;
@override
Future<void> prepare() async {
// Edge doesn't need any special prep.
}
@override
Future<void> cleanup() async {}
@override
String get packageTestConfigurationYamlFile => 'dart_test_edge.yaml';
}
/// Runs desktop Edge.
///
/// Most of the communication with the browser is expected to happen via HTTP,
/// so this exposes a bare-bones API. The browser starts as soon as the class is
/// constructed, and is killed when [close] is called.
///
/// Any errors starting or running the process are reported through [onExit].
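///
/// A minimal usage sketch (the URL below is illustrative):
///
/// ```dart
/// final Edge browser = Edge(Uri.parse('http://localhost:8080/index.html'));
/// // ... exercise the page over HTTP ...
/// await browser.close();
/// ```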
class Edge extends Browser {
/// Starts a new instance of Edge open to the given [url].
factory Edge(Uri url) {
return Edge._(BrowserProcess(() async {
final BrowserInstallation installation = await getEdgeInstallation(
packageLock.edgeLock.launcherVersion,
infoLog: DevNull(),
);
// Debug is not a valid option for Edge. Remove it.
String pathToOpen = url.toString();
if (pathToOpen.contains('debug')) {
final int index = pathToOpen.indexOf('debug');
pathToOpen = pathToOpen.substring(0, index - 1);
}
final Process process = await Process.start(
installation.executable,
<String>[pathToOpen,'-k'],
);
return process;
}));
}
Edge._(this._process);
final BrowserProcess _process;
@override
Future<void> get onExit => _process.onExit;
@override
Future<void> close() => _process.close();
}
| engine/lib/web_ui/dev/edge.dart/0 | {
"file_path": "engine/lib/web_ui/dev/edge.dart",
"repo_id": "engine",
"token_count": 727
} | 285 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert' show ByteConversionSink, jsonDecode, utf8;
import 'dart:io' as io;
import 'dart:typed_data';
import 'package:args/command_runner.dart';
import 'package:convert/convert.dart';
import 'package:crypto/crypto.dart' as crypto;
import 'package:http/http.dart' as http;
import 'package:path/path.dart' as path;
// ignore: avoid_relative_lib_imports
import '../lib/src/engine/noto_font_encoding.dart';
import 'cipd.dart';
import 'environment.dart';
import 'exceptions.dart';
import 'utils.dart';
const String expectedUrlPrefix = 'https://fonts.gstatic.com/s/';
class RollFallbackFontsCommand extends Command<bool>
with ArgUtils<bool> {
RollFallbackFontsCommand() {
argParser.addOption(
'key',
defaultsTo: '',
help: 'The Google Fonts API key. Used to get data about fonts hosted on '
'Google Fonts.',
);
argParser.addFlag(
'dry-run',
help: 'Whether or not to push changes to CIPD. When --dry-run is set, the '
'script will download everything and attempt to prepare the bundle '
'but will stop before publishing. When not set, the bundle will be '
'published.',
negatable: false,
);
}
@override
final String name = 'roll-fallback-fonts';
@override
final String description = 'Generate fallback font data from GoogleFonts and '
'upload fonts to cipd.';
String get apiKey => stringArg('key');
bool get isDryRun => boolArg('dry-run');
@override
Future<bool> run() async {
await _generateFallbackFontData();
return true;
}
Future<void> _generateFallbackFontData() async {
if (apiKey.isEmpty) {
throw UsageException('No Google Fonts API key provided', argParser.usage);
}
final http.Client client = http.Client();
final http.Response response = await client.get(Uri.parse(
'https://www.googleapis.com/webfonts/v1/webfonts?key=$apiKey'));
if (response.statusCode != 200) {
throw ToolExit('Failed to download Google Fonts list.');
}
final Map<String, dynamic> googleFontsResult =
jsonDecode(response.body) as Map<String, dynamic>;
final List<Map<String, dynamic>> fontDatas =
(googleFontsResult['items'] as List<dynamic>)
.cast<Map<String, dynamic>>();
final Map<String, Uri> urlForFamily = <String, Uri>{};
for (final Map<String, dynamic> fontData in fontDatas) {
if (fallbackFonts.contains(fontData['family'])) {
final Uri uri = Uri.parse(fontData['files']['regular'] as String)
.replace(scheme: 'https');
urlForFamily[fontData['family'] as String] = uri;
}
}
final Map<String, String> charsetForFamily = <String, String>{};
final io.Directory fontDir = await io.Directory.systemTemp.createTemp('flutter_fallback_fonts');
print('Downloading fonts into temp directory: ${fontDir.path}');
final AccumulatorSink<crypto.Digest> hashSink = AccumulatorSink<crypto.Digest>();
final ByteConversionSink hasher = crypto.sha256.startChunkedConversion(hashSink);
for (final String family in fallbackFonts) {
print('Downloading $family...');
final Uri? uri = urlForFamily[family];
if (uri == null) {
throw ToolExit('Unable to determine URL to download $family. '
'Check if it is still hosted on Google Fonts.');
}
final http.Response fontResponse = await client.get(uri);
if (fontResponse.statusCode != 200) {
throw ToolExit('Failed to download font for $family');
}
final String urlString = uri.toString();
if (!urlString.startsWith(expectedUrlPrefix)) {
throw ToolExit('Unexpected url format received from Google Fonts API: $urlString.');
}
final String urlSuffix = urlString.substring(expectedUrlPrefix.length);
final io.File fontFile =
io.File(path.join(fontDir.path, urlSuffix));
final Uint8List bodyBytes = fontResponse.bodyBytes;
if (!_checkForLicenseAttribution(bodyBytes)) {
throw ToolExit(
'Expected license attribution not found in file: $urlString');
}
hasher.add(utf8.encode(urlSuffix));
hasher.add(bodyBytes);
await fontFile.create(recursive: true);
await fontFile.writeAsBytes(bodyBytes, flush: true);
final io.ProcessResult fcQueryResult =
await io.Process.run('fc-query', <String>[
'--format=%{charset}',
'--',
fontFile.path,
]);
final String encodedCharset = fcQueryResult.stdout as String;
charsetForFamily[family] = encodedCharset;
}
final StringBuffer sb = StringBuffer();
final List<_Font> fonts = <_Font>[];
for (final String family in fallbackFonts) {
final List<int> starts = <int>[];
final List<int> ends = <int>[];
final String charset = charsetForFamily[family]!;
for (final String range in charset.split(' ')) {
// Range is one hexadecimal number or two, separated by `-`.
final List<String> parts = range.split('-');
if (parts.length != 1 && parts.length != 2) {
throw ToolExit('Malformed charset range "$range"');
}
final int first = int.parse(parts.first, radix: 16);
final int last = int.parse(parts.last, radix: 16);
starts.add(first);
ends.add(last);
}
fonts.add(_Font(family, fonts.length, starts, ends));
}
final String fontSetsCode = _computeEncodedFontSets(fonts);
sb.writeln('// Copyright 2013 The Flutter Authors. All rights reserved.');
sb.writeln('// Use of this source code is governed by a BSD-style license '
'that can be');
sb.writeln('// found in the LICENSE file.');
sb.writeln();
sb.writeln('// DO NOT EDIT! This file is generated. See:');
sb.writeln('// dev/roll_fallback_fonts.dart');
sb.writeln("import 'noto_font.dart';");
sb.writeln();
sb.writeln('List<NotoFont> getFallbackFontList(bool useColorEmoji) => <NotoFont>[');
for (final _Font font in fonts) {
final String family = font.family;
String enabledArgument = '';
if (family == 'Noto Emoji') {
enabledArgument = 'enabled: !useColorEmoji, ';
}
if (family == 'Noto Color Emoji') {
enabledArgument = 'enabled: useColorEmoji, ';
}
final String urlString = urlForFamily[family]!.toString();
if (!urlString.startsWith(expectedUrlPrefix)) {
throw ToolExit(
'Unexpected url format received from Google Fonts API: $urlString.');
}
final String urlSuffix = urlString.substring(expectedUrlPrefix.length);
sb.writeln(" NotoFont('$family', $enabledArgument'$urlSuffix'),");
}
sb.writeln('];');
sb.writeln();
sb.write(fontSetsCode);
final io.File fontDataFile = io.File(path.join(
environment.webUiRootDir.path,
'lib',
'src',
'engine',
'font_fallback_data.dart',
));
await fontDataFile.writeAsString(sb.toString());
final io.File licenseFile = io.File(path.join(
fontDir.path,
'LICENSE.txt',
));
const String licenseString = r'''
© Copyright 2015-2021 Google LLC. All Rights Reserved.
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.
"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.
5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are
not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
''';
final List<int> licenseData = utf8.encode(licenseString);
await licenseFile.create(recursive: true);
await licenseFile.writeAsBytes(licenseData);
hasher.add(licenseData);
hasher.close();
final crypto.Digest digest = hashSink.events.single;
final String versionString = digest.toString();
const String packageName = 'flutter/flutter_font_fallbacks';
if (await cipdKnowsPackageVersion(
package: packageName,
versionTag: versionString)) {
print('Package already exists with hash $versionString. Skipping upload');
} else {
print('Uploading fallback fonts to CIPD with hash $versionString');
await uploadDirectoryToCipd(
directory: fontDir,
packageName: packageName,
configFileName: 'cipd.flutter_font_fallbacks.yaml',
description: 'A set of Noto fonts to fall back to for use in testing.',
root: fontDir.path,
version: versionString,
isDryRun: isDryRun,
);
}
print('Setting new fallback fonts deps version to $versionString');
final String depFilePath = path.join(
environment.engineSrcDir.path,
'flutter',
'DEPS',
);
await runProcess('gclient', <String>[
'setdep',
'--revision=src/flutter/third_party/google_fonts_for_unit_tests:$packageName@$versionString',
'--deps-file=$depFilePath'
]);
}
}
const List<String> fallbackFonts = <String>[
'Noto Sans',
'Noto Color Emoji',
'Noto Emoji',
'Noto Music',
'Noto Sans Symbols',
'Noto Sans Symbols 2',
'Noto Sans Adlam',
'Noto Sans Anatolian Hieroglyphs',
'Noto Sans Arabic',
'Noto Sans Armenian',
'Noto Sans Avestan',
'Noto Sans Balinese',
'Noto Sans Bamum',
'Noto Sans Bassa Vah',
'Noto Sans Batak',
'Noto Sans Bengali',
'Noto Sans Bhaiksuki',
'Noto Sans Brahmi',
'Noto Sans Buginese',
'Noto Sans Buhid',
'Noto Sans Canadian Aboriginal',
'Noto Sans Carian',
'Noto Sans Caucasian Albanian',
'Noto Sans Chakma',
'Noto Sans Cham',
'Noto Sans Cherokee',
'Noto Sans Coptic',
'Noto Sans Cuneiform',
'Noto Sans Cypriot',
'Noto Sans Deseret',
'Noto Sans Devanagari',
'Noto Sans Duployan',
'Noto Sans Egyptian Hieroglyphs',
'Noto Sans Elbasan',
'Noto Sans Elymaic',
'Noto Sans Georgian',
'Noto Sans Glagolitic',
'Noto Sans Gothic',
'Noto Sans Grantha',
'Noto Sans Gujarati',
'Noto Sans Gunjala Gondi',
'Noto Sans Gurmukhi',
'Noto Sans HK',
'Noto Sans Hanunoo',
'Noto Sans Hatran',
'Noto Sans Hebrew',
'Noto Sans Imperial Aramaic',
'Noto Sans Indic Siyaq Numbers',
'Noto Sans Inscriptional Pahlavi',
'Noto Sans Inscriptional Parthian',
'Noto Sans JP',
'Noto Sans Javanese',
'Noto Sans KR',
'Noto Sans Kaithi',
'Noto Sans Kannada',
'Noto Sans Kayah Li',
'Noto Sans Kharoshthi',
'Noto Sans Khmer',
'Noto Sans Khojki',
'Noto Sans Khudawadi',
'Noto Sans Lao',
'Noto Sans Lepcha',
'Noto Sans Limbu',
'Noto Sans Linear A',
'Noto Sans Linear B',
'Noto Sans Lisu',
'Noto Sans Lycian',
'Noto Sans Lydian',
'Noto Sans Mahajani',
'Noto Sans Malayalam',
'Noto Sans Mandaic',
'Noto Sans Manichaean',
'Noto Sans Marchen',
'Noto Sans Masaram Gondi',
'Noto Sans Math',
'Noto Sans Mayan Numerals',
'Noto Sans Medefaidrin',
'Noto Sans Meetei Mayek',
'Noto Sans Meroitic',
'Noto Sans Miao',
'Noto Sans Modi',
'Noto Sans Mongolian',
'Noto Sans Mro',
'Noto Sans Multani',
'Noto Sans Myanmar',
'Noto Sans NKo',
'Noto Sans Nabataean',
'Noto Sans New Tai Lue',
'Noto Sans Newa',
'Noto Sans Nushu',
'Noto Sans Ogham',
'Noto Sans Ol Chiki',
'Noto Sans Old Hungarian',
'Noto Sans Old Italic',
'Noto Sans Old North Arabian',
'Noto Sans Old Permic',
'Noto Sans Old Persian',
'Noto Sans Old Sogdian',
'Noto Sans Old South Arabian',
'Noto Sans Old Turkic',
'Noto Sans Oriya',
'Noto Sans Osage',
'Noto Sans Osmanya',
'Noto Sans Pahawh Hmong',
'Noto Sans Palmyrene',
'Noto Sans Pau Cin Hau',
'Noto Sans Phags Pa',
'Noto Sans Phoenician',
'Noto Sans Psalter Pahlavi',
'Noto Sans Rejang',
'Noto Sans Runic',
'Noto Sans SC',
'Noto Sans Saurashtra',
'Noto Sans Sharada',
'Noto Sans Shavian',
'Noto Sans Siddham',
'Noto Sans Sinhala',
'Noto Sans Sogdian',
'Noto Sans Sora Sompeng',
'Noto Sans Soyombo',
'Noto Sans Sundanese',
'Noto Sans Syloti Nagri',
'Noto Sans Syriac',
'Noto Sans TC',
'Noto Sans Tagalog',
'Noto Sans Tagbanwa',
'Noto Sans Tai Le',
'Noto Sans Tai Tham',
'Noto Sans Tai Viet',
'Noto Sans Takri',
'Noto Sans Tamil',
'Noto Sans Tamil Supplement',
'Noto Sans Telugu',
'Noto Sans Thaana',
'Noto Sans Thai',
'Noto Sans Tifinagh',
'Noto Sans Tirhuta',
'Noto Sans Ugaritic',
'Noto Sans Vai',
'Noto Sans Wancho',
'Noto Sans Warang Citi',
'Noto Sans Yi',
'Noto Sans Zanabazar Square',
];
bool _checkForLicenseAttribution(Uint8List fontBytes) {
final ByteData fontData = fontBytes.buffer.asByteData();
final int codePointCount = fontData.lengthInBytes ~/ 2;
const String attributionString =
'This Font Software is licensed under the SIL Open Font License, Version 1.1.';
for (int i = 0; i < codePointCount - attributionString.length; i++) {
bool match = true;
for (int j = 0; j < attributionString.length; j++) {
if (fontData.getUint16((i + j) * 2) != attributionString.codeUnitAt(j)) {
match = false;
break;
}
}
if (match) {
return true;
}
}
return false;
}
class _Font {
_Font(this.family, this.index, this.starts, this.ends);
final String family;
final int index;
final List<int> starts;
final List<int> ends; // inclusive ends
static int compare(_Font a, _Font b) => a.index.compareTo(b.index);
String get shortName =>
_shortName +
String.fromCharCodes(
'$index'.codeUnits.map((int ch) => ch - 48 + 0x2080));
String get _shortName => family.startsWith('Noto Sans ')
? family.substring('Noto Sans '.length)
: family;
}
/// The boundary of a range of a font.
class _Boundary {
_Boundary(this.value, this.isStart, this.font);
final int value; // inclusive start or exclusive end.
final bool isStart;
final _Font font;
static int compare(_Boundary a, _Boundary b) => a.value.compareTo(b.value);
}
class _Range {
_Range(this.start, this.end, this.fontSet);
final int start;
final int end;
final _FontSet fontSet;
@override
String toString() {
return '[${start.toRadixString(16)}, ${end.toRadixString(16)}]'
' (${end - start + 1})'
' ${fontSet.description()}';
}
}
/// A canonical representative for a set of _Fonts. The fonts are stored in
/// order of increasing `_Font.index`.
class _FontSet {
_FontSet(this.fonts);
/// The number of [_Font]s in this set.
int get length => fonts.length;
/// The members of this set.
final List<_Font> fonts;
/// Number of unicode ranges that are supported by this set of fonts.
int rangeCount = 0;
/// The serialization order of this set. This index is assigned after building
/// all the sets.
late final int index;
static int orderByDecreasingRangeCount(_FontSet a, _FontSet b) {
final int r = b.rangeCount.compareTo(a.rangeCount);
if (r != 0) {
return r;
}
return orderByLexicographicFontIndexes(a, b);
}
static int orderByLexicographicFontIndexes(_FontSet a, _FontSet b) {
for (int i = 0; i < a.length && i < b.length; i++) {
final int r = _Font.compare(a.fonts[i], b.fonts[i]);
if (r != 0) {
return r;
}
}
assert(a.length != b.length); // _FontSets are canonical.
return a.length - b.length;
}
@override
String toString() {
return description();
}
String description() {
return fonts.map((_Font font) => font.shortName).join(', ');
}
}
/// A trie node [1] used to find the canonical _FontSet.
///
/// [1]: https://en.wikipedia.org/wiki/Trie
class _TrieNode {
final Map<_Font, _TrieNode> _children = <_Font, _TrieNode>{};
_FontSet? fontSet;
/// Inserts a string of fonts into the trie and returns the trie node
/// representing the string. [this] must be the root node of the trie.
///
/// Inserting the same sequence again will traverse the same path through the
/// trie and return the same node, canonicalizing the sequence to its
/// representative node.
_TrieNode insertSequenceAtRoot(Iterable<_Font> fonts) {
_TrieNode node = this;
for (final _Font font in fonts) {
node = node._children[font] ??= _TrieNode();
}
return node;
}
}
/// Computes the Dart source code for the encoded data structures used by the
/// fallback font selection algorithm.
///
/// The data structures allow the fallback font selection algorithm to quickly
/// determine which fonts support a given code point. The structures are
/// essentially a map from a code point to a set of fonts that support that code
/// point.
///
/// The universe of code points is partitioned into a set of subsets, or
/// components, where each component contains all the code points that are in
/// exactly the same set of fonts. A font can be considered to be a union of
/// some subset of the components and may share components with other fonts. A
/// `_FontSet` is used to represent a component and the set of fonts that use
/// the component. One way to visualize this is as a Venn diagram. The fonts are
/// the overlapping circles and the components are the spaces between the lines.
///
/// The emitted data structures are
///
/// (1) A list of sets of fonts.
/// (2) A list of code point ranges mapping to an index into list (1).
///
/// Each set of fonts is represented as a list of font indexes. The indexes are
/// always increasing so the delta is stored. The stored value is biased by -1
/// (i.e. `delta - 1`) since a delta is never less than 1. The deltas are STMR
/// encoded.
///
/// A code point with no fonts is mapped to an empty set of fonts. This allows
/// the list of code point ranges to be complete, covering every code
/// point. There are no gaps between ranges; instead there are some ranges that
/// map to the empty set. Each range is encoded as the size (number of code
/// points) in the range followed by the value which is the index of the
/// corresponding set in the list of sets.
///
///
/// STMR (Self terminating multiple radix) encoding
/// ---
///
/// This encoding is a minor adaptation of [VLQ encoding][1], using different
/// ranges of characters to represent continuing or terminating digits instead
/// of using a 'continuation' bit.
///
/// The separators between the numbers can be a significant proportion of the
/// number of characters needed to encode a sequence of numbers as a string.
/// Instead, values are encoded with two kinds of digits: prefix digits and
/// terminating digits. Each kind of digit uses a different set of characters,
/// and the radix (number of digit characters) can differ between the different
/// kinds of digit. Let's say we use decimal digits `0`..`9` for prefix digits
/// and `A`..`Z` as terminating digits.
///
/// M = ('M' - 'A') = 12
/// 38M = (3 * 10 + 8) * 26 + 12 = 38 * 26 + 12 = 1000
///
/// Choosing a large terminating radix is especially effective when most of the
/// encoded values are small, as is the case with delta-encoding.
///
/// There can be multiple terminating digit kinds to represent different sorts
/// of values. For the range table, the size uses a different terminating digit,
/// 'a'..'z'. This allows the very common size of 1 (accounting for over a third of
/// the range sizes) to be omitted. A range is encoded as either
/// `<size><value>`, or `<value>` with an implicit size of 1. Since the size 1
/// can be implicit, it is always implicit, and the stored sizes are biased by
/// -2.
///
/// | encoding | value | size |
/// | :--- | ---: | ---: |
/// | A | 0 | 1 |
/// | B | 1 | 1 |
/// | 38M | 1000 | 1 |
/// | aA | 0 | 2 |
/// | bB | 1 | 3 |
/// | zZ | 25 | 27 |
/// | 1a1A | 26 | 28 |
/// | 38m38M | 1000 | 1002 |
///
/// STMR-encoded strings are decoded efficiently by a simple loop that updates
/// the current value and performs some additional operation for a terminating
/// digit, e.g. recording the optional size, or creating a range.
///
/// [1]: https://en.wikipedia.org/wiki/Variable-length_quantity
String _computeEncodedFontSets(List<_Font> fonts) {
final List<_Range> ranges = <_Range>[];
final List<_FontSet> allSets = <_FontSet>[];
{
// The fonts have their supported code points provided as list of inclusive
// [start, end] ranges. We want to intersect all of these ranges and find
// the fonts that overlap each intersected range.
//
// It is easier to work with the boundaries of the ranges rather than the
// ranges themselves. The boundaries of the intersected ranges is the union
// of the boundaries of the individual font ranges. We scan the boundaries
// in increasing order, keeping track of the current set of fonts that are
// in the current intersected range. Each time the boundary value changes,
// the current set of fonts is canonicalized and recorded.
//
// There has to be a wiki article for this algorithm but I didn't find one.
final List<_Boundary> boundaries = <_Boundary>[];
for (final _Font font in fonts) {
for (final int start in font.starts) {
boundaries.add(_Boundary(start, true, font));
}
for (final int end in font.ends) {
boundaries.add(_Boundary(end + 1, false, font));
}
}
boundaries.sort(_Boundary.compare);
// The trie root represents the empty set of fonts.
final _TrieNode trieRoot = _TrieNode();
final Set<_Font> currentElements = <_Font>{};
void newRange(int start, int end) {
// Ensure we are using the canonical font order.
final List<_Font> fonts = List<_Font>.of(currentElements)
..sort(_Font.compare);
final _TrieNode node = trieRoot.insertSequenceAtRoot(fonts);
final _FontSet fontSet = node.fontSet ??= _FontSet(fonts);
if (fontSet.rangeCount == 0) {
allSets.add(fontSet);
}
fontSet.rangeCount++;
final _Range range = _Range(start, end, fontSet);
ranges.add(range);
}
int start = 0;
for (final _Boundary boundary in boundaries) {
final int value = boundary.value;
if (value > start) {
// Boundary has changed, record the pending range `[start, value - 1]`,
// and start a new range at `value`. `value` must be > 0 to get here.
newRange(start, value - 1);
start = value;
}
if (boundary.isStart) {
currentElements.add(boundary.font);
} else {
currentElements.remove(boundary.font);
}
}
assert(currentElements.isEmpty);
// Ensure the ranges cover the whole unicode code point space.
if (start <= kMaxCodePoint) {
newRange(start, kMaxCodePoint);
}
}
print('${allSets.length} sets covering ${ranges.length} ranges');
// Sort _FontSets by the number of ranges that map to that _FontSet, so that
// _FontSets that are referenced from many ranges have smaller indexes. This
// makes the range table encoding smaller, by about half.
allSets.sort(_FontSet.orderByDecreasingRangeCount);
for (int i = 0; i < allSets.length; i++) {
allSets[i].index = i;
}
final StringBuffer code = StringBuffer();
final StringBuffer sb = StringBuffer();
int totalEncodedLength = 0;
void encode(int value, int radix, int firstDigitCode) {
final int prefix = value ~/ radix;
assert(kPrefixDigit0 == '0'.codeUnitAt(0) && kPrefixRadix == 10);
if (prefix != 0) {
sb.write(prefix);
}
sb.writeCharCode(firstDigitCode + value.remainder(radix));
}
for (final _FontSet fontSet in allSets) {
int previousFontIndex = -1;
for (final _Font font in fontSet.fonts) {
final int fontIndexDelta = font.index - previousFontIndex;
previousFontIndex = font.index;
encode(fontIndexDelta - 1, kFontIndexRadix, kFontIndexDigit0);
}
if (fontSet != allSets.last) {
sb.write(',');
}
final String fragment = sb.toString();
sb.clear();
totalEncodedLength += fragment.length;
final int length = fontSet.fonts.length;
code.write(' // #${fontSet.index}: $length font');
if (length != 1) {
code.write('s');
}
if (length > 0) {
code.write(': ${fontSet.description()}');
}
code.writeln('.');
code.writeln(" '$fragment'");
}
final StringBuffer declarations = StringBuffer();
final int references =
allSets.fold(0, (int sum, _FontSet set) => sum + set.length);
declarations
..writeln('// ${allSets.length} unique sets of fonts'
' containing $references font references'
' encoded in $totalEncodedLength characters')
..writeln('const String encodedFontSets =')
..write(code)
..writeln(' ;');
// Encode ranges.
code.clear();
totalEncodedLength = 0;
for (final _Range range in ranges) {
final int start = range.start;
final int end = range.end;
final int index = range.fontSet.index;
final int size = end - start + 1;
// Encode <size><index> or <index> for unit ranges.
if (size >= 2) {
encode(size - 2, kRangeSizeRadix, kRangeSizeDigit0);
}
encode(index, kRangeValueRadix, kRangeValueDigit0);
final String encoding = sb.toString();
sb.clear();
totalEncodedLength += encoding.length;
String description = start.toRadixString(16);
if (end != start) {
description = '$description-${end.toRadixString(16)}';
}
if (range.fontSet.fonts.isNotEmpty) {
description = '${description.padRight(12)} #$index';
}
final String encodingText = "'$encoding'".padRight(10);
code.writeln(' $encodingText // $description');
}
declarations
..writeln()
..writeln(
'// ${ranges.length} ranges encoded in $totalEncodedLength characters')
..writeln('const String encodedFontSetRanges =')
..write(code)
..writeln(' ;');
return declarations.toString();
}
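// A minimal decoding sketch for the STMR encoding documented above, mirroring
// `encode`. The prefix digits '0'..'9' are taken from the assert in `encode`;
// the terminating digit range is passed in because the real kFontIndexDigit0,
// kRangeSizeDigit0, and kRangeValueDigit0 constants are defined elsewhere in
// this file. The function name is illustrative only, and the sketch does not
// handle the ',' separators written between font sets.
List<int> debugDecodeStmr(
    String encoded, int terminatingDigit0, int terminatingRadix) {
  final List<int> values = <int>[];
  int prefix = 0;
  for (final int code in encoded.codeUnits) {
    if (code >= 0x30 && code <= 0x39) {
      // A prefix digit '0'..'9' (kPrefixRadix is 10).
      prefix = prefix * 10 + (code - 0x30);
    } else {
      // A terminating digit: emit the completed value and reset the prefix.
      values.add(prefix * terminatingRadix + (code - terminatingDigit0));
      prefix = 0;
    }
  }
  return values;
}
// With the 'A'..'Z' terminating digits used in the doc comment's examples:
//
//   debugDecodeStmr('38M', 'A'.codeUnitAt(0), 26)   => [1000]
//   debugDecodeStmr('AB38M', 'A'.codeUnitAt(0), 26) => [0, 1, 1000]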
| engine/lib/web_ui/dev/roll_fallback_fonts.dart/0 | {
"file_path": "engine/lib/web_ui/dev/roll_fallback_fonts.dart",
"repo_id": "engine",
"token_count": 10418
} | 286 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import { baseUri } from "./base_uri.js";
/**
* Handles injecting the main Flutter web entrypoint (main.dart.js), and notifying
* the user when Flutter is ready, through `didCreateEngineInitializer`.
*
* @see https://docs.flutter.dev/development/platform-integration/web/initialization
*/
export class FlutterEntrypointLoader {
/**
* Creates a FlutterEntrypointLoader.
*/
constructor() {
// Watchdog to prevent injecting the main entrypoint multiple times.
this._scriptLoaded = false;
}
/**
* Injects a TrustedTypesPolicy (or undefined if the feature is not supported).
* @param {TrustedTypesPolicy | undefined} policy
*/
setTrustedTypesPolicy(policy) {
this._ttPolicy = policy;
}
/**
* @deprecated
* Loads flutter main entrypoint, specified by `entrypointUrl`, and calls a
* user-specified `onEntrypointLoaded` callback with an EngineInitializer
* object when it's done.
*
* @param {*} options
* @returns {Promise | undefined} that will eventually resolve with an
* EngineInitializer, or will be rejected with the error caused by the loader.
* Returns undefined when an `onEntrypointLoaded` callback is supplied in `options`.
*/
async loadEntrypoint(options) {
const { entrypointUrl = `${baseUri}main.dart.js`, onEntrypointLoaded, nonce } =
options || {};
return this._loadJSEntrypoint(entrypointUrl, onEntrypointLoaded, nonce);
}
/**
* Loads the entry point for a flutter application.
* @param {import("./types").ApplicationBuild} build
* Information about the specific build that is to be loaded
* @param {*} deps
* External dependencies that may be needed to load the app.
* @param {import("./types").FlutterConfiguration} config
* The application configuration. If no callback is specified, this will be
* passed along to engine when initializing it.
* @param {string} nonce
* A nonce to apply to the main application script tag, if necessary.
* @param {import("./types").OnEntrypointLoadedCallback?} onEntrypointLoaded
* An optional callback to invoke when the entrypoint is loaded. If no
* callback is supplied, the engine initializer and app runner will be
* automatically invoked on load, passing along the supplied flutter
* configuration.
*/
async load(build, deps, config, nonce, onEntrypointLoaded) {
onEntrypointLoaded ??= (engineInitializer) => {
engineInitializer.initializeEngine(config).then((appRunner) => appRunner.runApp())
};
if (build.compileTarget === "dart2wasm") {
return this._loadWasmEntrypoint(build, deps, onEntrypointLoaded);
} else {
const mainPath = build.mainJsPath ?? "main.dart.js";
const entrypointUrl = `${baseUri}${mainPath}`;
return this._loadJSEntrypoint(entrypointUrl, onEntrypointLoaded, nonce);
}
}
/**
* Resolves the promise created by loadEntrypoint, and calls the `onEntrypointLoaded`
* function supplied by the user (if needed).
*
* Called by Flutter through `_flutter.loader.didCreateEngineInitializer` method,
* which is bound to the correct instance of the FlutterEntrypointLoader by
* the FlutterLoader object.
*
* @param {Function} engineInitializer @see https://github.com/flutter/engine/blob/main/lib/web_ui/lib/src/engine/js_interop/js_loader.dart#L42
*/
didCreateEngineInitializer(engineInitializer) {
if (typeof this._didCreateEngineInitializerResolve === "function") {
this._didCreateEngineInitializerResolve(engineInitializer);
// Remove the resolver after the first time, so Flutter Web can hot restart.
this._didCreateEngineInitializerResolve = null;
// Make the engine revert to "auto" initialization on hot restart.
delete _flutter.loader.didCreateEngineInitializer;
}
if (typeof this._onEntrypointLoaded === "function") {
this._onEntrypointLoaded(engineInitializer);
}
}
/**
* Injects a script tag into the DOM, and configures this loader to be able to
* handle the "entrypoint loaded" notifications received from Flutter web.
*
* @param {string} entrypointUrl the URL of the script that will initialize
* Flutter.
* @param {Function} onEntrypointLoaded a callback that will be called when
* Flutter web notifies this object that the entrypoint is
* loaded.
* @returns {Promise | undefined} a Promise that resolves when the entrypoint
* is loaded, or undefined if `onEntrypointLoaded`
* is a function.
*/
_loadJSEntrypoint(entrypointUrl, onEntrypointLoaded, nonce) {
const useCallback = typeof onEntrypointLoaded === "function";
if (!this._scriptLoaded) {
this._scriptLoaded = true;
const scriptTag = this._createScriptTag(entrypointUrl, nonce);
if (useCallback) {
// Just inject the script tag, and return nothing; Flutter will call
// `didCreateEngineInitializer` when it's done.
console.debug("Injecting <script> tag. Using callback.");
this._onEntrypointLoaded = onEntrypointLoaded;
document.head.append(scriptTag);
} else {
// Inject the script tag and return a promise that will get resolved
// with the EngineInitializer object from Flutter when it calls
// `didCreateEngineInitializer` later.
return new Promise((resolve, reject) => {
console.debug(
"Injecting <script> tag. Using Promises. Use the callback approach instead!"
);
this._didCreateEngineInitializerResolve = resolve;
scriptTag.addEventListener("error", reject);
document.head.append(scriptTag);
});
}
}
}
/**
*
* @param {import("./types").WasmApplicationBuild} build
* @param {*} deps
* @param {import("./types").OnEntrypointLoadedCallback} onEntrypointLoaded
*/
async _loadWasmEntrypoint(build, deps, onEntrypointLoaded) {
if (!this._scriptLoaded) {
this._scriptLoaded = true;
this._onEntrypointLoaded = onEntrypointLoaded;
const { mainWasmPath, jsSupportRuntimePath } = build;
const moduleUri = `${baseUri}${mainWasmPath}`;
let jsSupportRuntimeUri = `${baseUri}${jsSupportRuntimePath}`;
if (this._ttPolicy != null) {
jsSupportRuntimeUri = this._ttPolicy.createScriptURL(jsSupportRuntimeUri);
}
const dartModulePromise = WebAssembly.compileStreaming(fetch(moduleUri));
const jsSupportRuntime = await import(jsSupportRuntimeUri);
let imports;
if (build.renderer === "skwasm") {
imports = (async () => {
const skwasmInstance = await deps.skwasm;
window._flutter_skwasmInstance = skwasmInstance;
return {
skwasm: skwasmInstance.wasmExports,
skwasmWrapper: skwasmInstance,
ffi: {
memory: skwasmInstance.wasmMemory,
},
};
})();
} else {
imports = {};
}
const moduleInstance = await jsSupportRuntime.instantiate(dartModulePromise, imports);
await jsSupportRuntime.invoke(moduleInstance);
}
}
/**
* Creates a script tag for the given URL.
* @param {string} url
* @returns {HTMLScriptElement}
*/
_createScriptTag(url, nonce) {
const scriptTag = document.createElement("script");
scriptTag.type = "application/javascript";
if (nonce) {
scriptTag.nonce = nonce;
}
// Apply TrustedTypes validation, if available.
let trustedUrl = url;
if (this._ttPolicy != null) {
trustedUrl = this._ttPolicy.createScriptURL(url);
}
scriptTag.src = trustedUrl;
return scriptTag;
}
}
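/**
 * A minimal usage sketch based only on the (deprecated) `loadEntrypoint` API
 * above. In a real app, flutter.js constructs this loader and exposes it
 * through `_flutter.loader`; the standalone helper below (and its name) is
 * purely illustrative.
 */
async function exampleLoadFlutterApp() {
  const entrypointLoader = new FlutterEntrypointLoader();
  await entrypointLoader.loadEntrypoint({
    entrypointUrl: `${baseUri}main.dart.js`,
    onEntrypointLoaded: async (engineInitializer) => {
      const appRunner = await engineInitializer.initializeEngine();
      await appRunner.runApp();
    },
  });
}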
| engine/lib/web_ui/flutter_js/src/entrypoint_loader.js/0 | {
"file_path": "engine/lib/web_ui/flutter_js/src/entrypoint_loader.js",
"repo_id": "engine",
"token_count": 2847
} | 287 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of ui;
/// Linearly interpolate between two numbers, `a` and `b`, by an extrapolation
/// factor `t`.
///
/// When `a` and `b` are equal or both NaN, `a` is returned. Otherwise,
/// `a`, `b`, and `t` are required to be finite or null, and the result of `a +
/// (b - a) * t` is returned, where nulls are defaulted to 0.0.
double? lerpDouble(num? a, num? b, double t) {
if (a == b || (a?.isNaN ?? false) && (b?.isNaN ?? false)) {
return a?.toDouble();
}
a ??= 0.0;
b ??= 0.0;
assert(a.isFinite, 'Cannot interpolate between finite and non-finite values');
assert(b.isFinite, 'Cannot interpolate between finite and non-finite values');
assert(t.isFinite, 't must be finite when interpolating between values');
return a * (1.0 - t) + b * t;
}
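// Illustrative values (not part of the API), showing interpolation,
// extrapolation, and the null handling described above:
//
//   lerpDouble(0.0, 10.0, 0.5)  == 5.0   // midway between `a` and `b`
//   lerpDouble(0.0, 10.0, 1.5)  == 15.0  // `t` outside [0, 1] extrapolates
//   lerpDouble(null, 10.0, 0.5) == 5.0   // null is treated as 0.0
//   lerpDouble(5.0, 5.0, 0.75)  == 5.0   // `a == b` returns `a`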
/// Linearly interpolate between two doubles.
///
/// Same as [lerpDouble] but specialized for non-null `double` type.
double _lerpDouble(double a, double b, double t) {
return a * (1.0 - t) + b * t;
}
/// Linearly interpolate between two integers.
///
/// Same as [lerpDouble] but specialized for non-null `int` type.
double _lerpInt(int a, int b, double t) {
return a * (1.0 - t) + b * t;
}
| engine/lib/web_ui/lib/lerp.dart/0 | {
"file_path": "engine/lib/web_ui/lib/lerp.dart",
"repo_id": "engine",
"token_count": 464
} | 288 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Bindings for CanvasKit JavaScript API.
///
/// Prefer keeping the original CanvasKit names so it is easier to locate
/// the API behind these bindings in the Skia source code.
// ignore_for_file: non_constant_identifier_names
@JS()
library canvaskit_api;
import 'dart:async';
import 'dart:convert';
import 'dart:js_interop';
import 'dart:js_util' as js_util;
import 'dart:typed_data';
import 'package:meta/meta.dart';
import 'package:ui/ui.dart' as ui;
import '../browser_detection.dart';
import '../configuration.dart';
import '../dom.dart';
import 'renderer.dart';
/// Entrypoint into the CanvasKit API.
late CanvasKit canvasKit;
bool get _enableCanvasKitChromiumInAutoMode => browserSupportsCanvaskitChromium;
/// Sets the [CanvasKit] object on `window` so we can use `@JS()` to bind to
/// static APIs.
///
/// See, e.g. [SkPaint].
///
/// This also acts as a cache of an initialized CanvasKit instance. We can use
/// this, for example, to perform a hot restart without needing to redownload
/// and reinitialize CanvasKit.
@JS('window.flutterCanvasKit')
external set windowFlutterCanvasKit(CanvasKit? value);
@JS('window.flutterCanvasKit')
external CanvasKit? get windowFlutterCanvasKit;
@JS('window.flutterCanvasKitLoaded')
external JSPromise<JSAny>? get windowFlutterCanvasKitLoaded;
@JS()
@anonymous
@staticInterop
class CanvasKit {}
extension CanvasKitExtension on CanvasKit {
external SkBlendModeEnum get BlendMode;
external SkPaintStyleEnum get PaintStyle;
external SkStrokeCapEnum get StrokeCap;
external SkStrokeJoinEnum get StrokeJoin;
external SkBlurStyleEnum get BlurStyle;
external SkTileModeEnum get TileMode;
external SkFilterModeEnum get FilterMode;
external SkMipmapModeEnum get MipmapMode;
external SkFillTypeEnum get FillType;
external SkAlphaTypeEnum get AlphaType;
external SkColorTypeEnum get ColorType;
external SkPathOpEnum get PathOp;
external SkClipOpEnum get ClipOp;
external SkPointModeEnum get PointMode;
external SkVertexModeEnum get VertexMode;
external SkRectHeightStyleEnum get RectHeightStyle;
external SkRectWidthStyleEnum get RectWidthStyle;
external SkAffinityEnum get Affinity;
external SkTextAlignEnum get TextAlign;
external SkTextHeightBehaviorEnum get TextHeightBehavior;
external SkTextDirectionEnum get TextDirection;
external SkFontWeightEnum get FontWeight;
external SkFontSlantEnum get FontSlant;
@JS('MakeAnimatedImageFromEncoded')
external SkAnimatedImage? _MakeAnimatedImageFromEncoded(
JSUint8Array imageData);
SkAnimatedImage? MakeAnimatedImageFromEncoded(Uint8List imageData) =>
_MakeAnimatedImageFromEncoded(imageData.toJS);
external SkShaderNamespace get Shader;
external SkMaskFilterNamespace get MaskFilter;
external SkColorFilterNamespace get ColorFilter;
external SkImageFilterNamespace get ImageFilter;
external SkPathNamespace get Path;
external SkTonalColors computeTonalColors(SkTonalColors inTonalColors);
@JS('MakeVertices')
external SkVertices _MakeVertices(
SkVertexMode mode,
JSFloat32Array positions,
JSFloat32Array? textureCoordinates,
JSUint32Array? colors,
JSUint16Array? indices,
);
SkVertices MakeVertices(
SkVertexMode mode,
Float32List positions,
Float32List? textureCoordinates,
Uint32List? colors,
Uint16List? indices,
) => _MakeVertices(mode, positions.toJS, textureCoordinates?.toJS,
colors?.toJS, indices?.toJS);
external SkParagraphBuilderNamespace get ParagraphBuilder;
external SkParagraphStyle ParagraphStyle(
SkParagraphStyleProperties properties);
external SkTextStyle TextStyle(SkTextStyleProperties properties);
external SkSurface MakeWebGLCanvasSurface(DomCanvasElement canvas);
@JS('MakeSurface')
external SkSurface _MakeSurface(
JSNumber width,
JSNumber height,
);
SkSurface MakeSurface(
double width,
double height,
) => _MakeSurface(width.toJS, height.toJS);
@JS('getDataBytes')
external JSUint8Array _getDataBytes(
SkData skData,
);
Uint8List getDataBytes(
SkData skData,
) => _getDataBytes(skData).toDart;
// Text decoration enum is embedded in the CanvasKit object itself.
@JS('NoDecoration')
external JSNumber get _NoDecoration;
double get NoDecoration => _NoDecoration.toDartDouble;
@JS('UnderlineDecoration')
external JSNumber get _UnderlineDecoration;
double get UnderlineDecoration => _UnderlineDecoration.toDartDouble;
@JS('OverlineDecoration')
external JSNumber get _OverlineDecoration;
double get OverlineDecoration => _OverlineDecoration.toDartDouble;
@JS('LineThroughDecoration')
external JSNumber get _LineThroughDecoration;
double get LineThroughDecoration => _LineThroughDecoration.toDartDouble;
// End of text decoration enum.
external SkTextDecorationStyleEnum get DecorationStyle;
external SkTextBaselineEnum get TextBaseline;
external SkPlaceholderAlignmentEnum get PlaceholderAlignment;
external SkFontMgrNamespace get FontMgr;
external TypefaceFontProviderNamespace get TypefaceFontProvider;
external FontCollectionNamespace get FontCollection;
external SkTypefaceFactory get Typeface;
@JS('GetWebGLContext')
external JSNumber _GetWebGLContext(
DomCanvasElement canvas, SkWebGLContextOptions options);
double GetWebGLContext(
DomCanvasElement canvas, SkWebGLContextOptions options) =>
_GetWebGLContext(canvas, options).toDartDouble;
@JS('GetWebGLContext')
external JSNumber _GetOffscreenWebGLContext(
DomOffscreenCanvas canvas, SkWebGLContextOptions options);
double GetOffscreenWebGLContext(
DomOffscreenCanvas canvas, SkWebGLContextOptions options) =>
_GetOffscreenWebGLContext(canvas, options).toDartDouble;
@JS('MakeGrContext')
external SkGrContext _MakeGrContext(JSNumber glContext);
SkGrContext MakeGrContext(double glContext) =>
_MakeGrContext(glContext.toJS);
@JS('MakeOnScreenGLSurface')
external SkSurface? _MakeOnScreenGLSurface(
SkGrContext grContext,
JSNumber width,
JSNumber height,
ColorSpace colorSpace,
JSNumber sampleCount,
JSNumber stencil,
);
SkSurface? MakeOnScreenGLSurface(
SkGrContext grContext,
double width,
double height,
ColorSpace colorSpace,
int sampleCount,
int stencil,
) => _MakeOnScreenGLSurface(grContext, width.toJS, height.toJS, colorSpace,
sampleCount.toJS, stencil.toJS);
@JS('MakeRenderTarget')
external SkSurface? _MakeRenderTarget(
SkGrContext grContext,
JSNumber width,
JSNumber height,
);
SkSurface? MakeRenderTarget(
SkGrContext grContext,
int width,
int height,
) => _MakeRenderTarget(grContext, width.toJS, height.toJS);
external SkSurface MakeSWCanvasSurface(DomCanvasElement canvas);
@JS('MakeSWCanvasSurface')
external SkSurface MakeOffscreenSWCanvasSurface(DomOffscreenCanvas canvas);
/// Creates an image from decoded pixels represented as a list of bytes.
///
/// The pixel data must be encoded according to the image info in [info].
///
/// Typically pixel data is obtained using [SkImage.readPixels]. The
  /// parameters specified in [SkImageInfo] passed to [SkImage.readPixels] must
/// match [info].
@JS('MakeImage')
external SkImage? _MakeImage(
SkImageInfo info,
JSUint8Array pixels,
JSNumber bytesPerRow,
);
SkImage? MakeImage(
SkImageInfo info,
Uint8List pixels,
double bytesPerRow,
) => _MakeImage(info, pixels.toJS, bytesPerRow.toJS);
@JS('MakeLazyImageFromTextureSource')
external SkImage? _MakeLazyImageFromTextureSource2(
JSAny src,
SkPartialImageInfo info,
);
@JS('MakeLazyImageFromTextureSource')
external SkImage? _MakeLazyImageFromTextureSource3(
JSAny src,
JSNumber zeroSecondArgument,
JSBoolean srcIsPremultiplied,
);
SkImage? MakeLazyImageFromTextureSourceWithInfo(
Object src,
SkPartialImageInfo info,
) => _MakeLazyImageFromTextureSource2(src.toJSAnyShallow, info);
SkImage? MakeLazyImageFromImageBitmap(
DomImageBitmap imageBitmap,
bool hasPremultipliedAlpha,
) => _MakeLazyImageFromTextureSource3(
imageBitmap as JSObject,
0.toJS,
hasPremultipliedAlpha.toJS,
);
}
@JS('window.CanvasKitInit')
external JSAny _CanvasKitInit(CanvasKitInitOptions options);
Future<CanvasKit> CanvasKitInit(CanvasKitInitOptions options) {
return js_util.promiseToFuture<CanvasKit>(
_CanvasKitInit(options).toObjectShallow);
}
typedef LocateFileCallback = String Function(String file, String unusedBase);
JSFunction createLocateFileCallback(LocateFileCallback callback) =>
callback.toJS;
@JS()
@anonymous
@staticInterop
class CanvasKitInitOptions {
external factory CanvasKitInitOptions({
required JSFunction locateFile,
});
}
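// A minimal sketch of how these bindings are used to download and initialize
// CanvasKit. The function name and the URL below are hypothetical; the real
// engine computes the download URL from its configuration and renderer setup.
Future<CanvasKit> debugDownloadCanvasKit() async {
  final CanvasKit loaded = await CanvasKitInit(CanvasKitInitOptions(
    // `locateFile` tells the CanvasKit loader where to fetch files such as
    // canvaskit.wasm relative to the main JavaScript bundle.
    locateFile: createLocateFileCallback(
      (String file, String unusedBase) =>
          'https://cdn.example/canvaskit/$file',
    ),
  ));
  // Caching the instance on `window` lets a hot restart reuse it (see the
  // `windowFlutterCanvasKit` doc comment above).
  windowFlutterCanvasKit = loaded;
  return loaded;
}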
@JS('window.flutterCanvasKit.ColorSpace.SRGB')
external ColorSpace get SkColorSpaceSRGB;
@JS()
@staticInterop
class ColorSpace {}
@JS()
@anonymous
@staticInterop
class SkWebGLContextOptions {
factory SkWebGLContextOptions({
required double antialias,
// WebGL version: 1 or 2.
required double majorVersion,
}) => SkWebGLContextOptions._(
antialias: antialias.toJS, majorVersion: majorVersion.toJS
);
external factory SkWebGLContextOptions._({
required JSNumber antialias,
// WebGL version: 1 or 2.
required JSNumber majorVersion,
});
}
@JS('window.flutterCanvasKit.Surface')
@staticInterop
class SkSurface {}
extension SkSurfaceExtension on SkSurface {
external SkCanvas getCanvas();
external JSVoid flush();
@JS('width')
external JSNumber _width();
double width() => _width().toDartDouble;
@JS('height')
external JSNumber _height();
double height() => _height().toDartDouble;
external JSVoid dispose();
external SkImage makeImageSnapshot();
}
@JS()
@staticInterop
class SkGrContext {}
extension SkGrContextExtension on SkGrContext {
@JS('setResourceCacheLimitBytes')
external JSVoid _setResourceCacheLimitBytes(JSNumber limit);
void setResourceCacheLimitBytes(double limit) =>
_setResourceCacheLimitBytes(limit.toJS);
external JSVoid releaseResourcesAndAbandonContext();
external JSVoid delete();
}
@JS()
@anonymous
@staticInterop
class SkFontSlantEnum {}
extension SkFontSlantEnumExtension on SkFontSlantEnum {
external SkFontSlant get Upright;
external SkFontSlant get Italic;
}
@JS('window.flutterCanvasKit.FontSlant')
@staticInterop
class SkFontSlant {}
extension SkFontSlantExtension on SkFontSlant {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkFontSlant> _skFontSlants = <SkFontSlant>[
canvasKit.FontSlant.Upright,
canvasKit.FontSlant.Italic,
];
SkFontSlant toSkFontSlant(ui.FontStyle style) {
return _skFontSlants[style.index];
}
@JS()
@anonymous
@staticInterop
class SkFontWeightEnum {}
extension SkFontWeightEnumExtension on SkFontWeightEnum {
external SkFontWeight get Thin;
external SkFontWeight get ExtraLight;
external SkFontWeight get Light;
external SkFontWeight get Normal;
external SkFontWeight get Medium;
external SkFontWeight get SemiBold;
external SkFontWeight get Bold;
external SkFontWeight get ExtraBold;
external SkFontWeight get ExtraBlack;
}
@JS()
@staticInterop
class SkFontWeight {}
extension SkFontWeightExtension on SkFontWeight {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkFontWeight> _skFontWeights = <SkFontWeight>[
canvasKit.FontWeight.Thin,
canvasKit.FontWeight.ExtraLight,
canvasKit.FontWeight.Light,
canvasKit.FontWeight.Normal,
canvasKit.FontWeight.Medium,
canvasKit.FontWeight.SemiBold,
canvasKit.FontWeight.Bold,
canvasKit.FontWeight.ExtraBold,
canvasKit.FontWeight.ExtraBlack,
];
SkFontWeight toSkFontWeight(ui.FontWeight weight) {
return _skFontWeights[weight.index];
}
@JS()
@staticInterop
class SkAffinityEnum {}
extension SkAffinityEnumExtension on SkAffinityEnum {
external SkAffinity get Upstream;
external SkAffinity get Downstream;
}
@JS()
@staticInterop
class SkAffinity {}
extension SkAffinityExtension on SkAffinity {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkAffinity> _skAffinitys = <SkAffinity>[
canvasKit.Affinity.Upstream,
canvasKit.Affinity.Downstream,
];
SkAffinity toSkAffinity(ui.TextAffinity affinity) {
return _skAffinitys[affinity.index];
}
@JS()
@staticInterop
class SkTextDirectionEnum {}
extension SkTextDirectionEnumExtension on SkTextDirectionEnum {
external SkTextDirection get RTL;
external SkTextDirection get LTR;
}
@JS()
@staticInterop
class SkTextDirection {}
extension SkTextDirectionExtension on SkTextDirection {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
// Flutter enumerates text directions as RTL, LTR, while CanvasKit
// enumerates them LTR, RTL.
final List<SkTextDirection> _skTextDirections = <SkTextDirection>[
canvasKit.TextDirection.RTL,
canvasKit.TextDirection.LTR,
];
SkTextDirection toSkTextDirection(ui.TextDirection direction) {
return _skTextDirections[direction.index];
}
@JS()
@staticInterop
class SkTextAlignEnum {}
extension SkTextAlignEnumExtension on SkTextAlignEnum {
external SkTextAlign get Left;
external SkTextAlign get Right;
external SkTextAlign get Center;
external SkTextAlign get Justify;
external SkTextAlign get Start;
external SkTextAlign get End;
}
@JS()
@staticInterop
class SkTextAlign {}
extension SkTextAlignExtension on SkTextAlign {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkTextAlign> _skTextAligns = <SkTextAlign>[
canvasKit.TextAlign.Left,
canvasKit.TextAlign.Right,
canvasKit.TextAlign.Center,
canvasKit.TextAlign.Justify,
canvasKit.TextAlign.Start,
canvasKit.TextAlign.End,
];
SkTextAlign toSkTextAlign(ui.TextAlign align) {
return _skTextAligns[align.index];
}
@JS()
@staticInterop
class SkTextHeightBehaviorEnum {}
extension SkTextHeightBehaviorEnumExtension on SkTextHeightBehaviorEnum {
external SkTextHeightBehavior get All;
external SkTextHeightBehavior get DisableFirstAscent;
external SkTextHeightBehavior get DisableLastDescent;
external SkTextHeightBehavior get DisableAll;
}
@JS()
@staticInterop
class SkTextHeightBehavior {}
extension SkTextHeightBehaviorExtension on SkTextHeightBehavior {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkTextHeightBehavior> _skTextHeightBehaviors =
<SkTextHeightBehavior>[
canvasKit.TextHeightBehavior.All,
canvasKit.TextHeightBehavior.DisableFirstAscent,
canvasKit.TextHeightBehavior.DisableLastDescent,
canvasKit.TextHeightBehavior.DisableAll,
];
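// Builds a 2-bit index into the list above: bit 0 is set when the height is
// not applied to the first ascent, and bit 1 when it is not applied to the
// last descent, matching the All / DisableFirstAscent / DisableLastDescent /
// DisableAll order.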
SkTextHeightBehavior toSkTextHeightBehavior(ui.TextHeightBehavior behavior) {
final int index = (behavior.applyHeightToFirstAscent ? 0 : 1 << 0) |
(behavior.applyHeightToLastDescent ? 0 : 1 << 1);
return _skTextHeightBehaviors[index];
}
@JS()
@staticInterop
class SkRectHeightStyleEnum {}
extension SkRectHeightStyleEnumExtension on SkRectHeightStyleEnum {
external SkRectHeightStyle get Tight;
external SkRectHeightStyle get Max;
external SkRectHeightStyle get IncludeLineSpacingMiddle;
external SkRectHeightStyle get IncludeLineSpacingTop;
external SkRectHeightStyle get IncludeLineSpacingBottom;
external SkRectHeightStyle get Strut;
}
@JS()
@staticInterop
class SkRectHeightStyle {}
extension SkRectHeightStyleExtension on SkRectHeightStyle {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkRectHeightStyle> _skRectHeightStyles = <SkRectHeightStyle>[
canvasKit.RectHeightStyle.Tight,
canvasKit.RectHeightStyle.Max,
canvasKit.RectHeightStyle.IncludeLineSpacingMiddle,
canvasKit.RectHeightStyle.IncludeLineSpacingTop,
canvasKit.RectHeightStyle.IncludeLineSpacingBottom,
canvasKit.RectHeightStyle.Strut,
];
SkRectHeightStyle toSkRectHeightStyle(ui.BoxHeightStyle style) {
return _skRectHeightStyles[style.index];
}
@JS()
@staticInterop
class SkRectWidthStyleEnum {}
extension SkRectWidthStyleEnumExtension on SkRectWidthStyleEnum {
external SkRectWidthStyle get Tight;
external SkRectWidthStyle get Max;
}
@JS()
@staticInterop
class SkRectWidthStyle {}
extension SkRectWidthStyleExtension on SkRectWidthStyle {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkRectWidthStyle> _skRectWidthStyles = <SkRectWidthStyle>[
canvasKit.RectWidthStyle.Tight,
canvasKit.RectWidthStyle.Max,
];
SkRectWidthStyle toSkRectWidthStyle(ui.BoxWidthStyle style) {
final int index = style.index;
return _skRectWidthStyles[index < 2 ? index : 0];
}
@JS()
@staticInterop
class SkVertexModeEnum {}
extension SkVertexModeEnumExtension on SkVertexModeEnum {
external SkVertexMode get Triangles;
external SkVertexMode get TrianglesStrip;
external SkVertexMode get TriangleFan;
}
@JS()
@staticInterop
class SkVertexMode {}
extension SkVertexModeExtension on SkVertexMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkVertexMode> _skVertexModes = <SkVertexMode>[
canvasKit.VertexMode.Triangles,
canvasKit.VertexMode.TrianglesStrip,
canvasKit.VertexMode.TriangleFan,
];
SkVertexMode toSkVertexMode(ui.VertexMode mode) {
return _skVertexModes[mode.index];
}
@JS()
@staticInterop
class SkPointModeEnum {}
extension SkPointModeEnumExtension on SkPointModeEnum {
external SkPointMode get Points;
external SkPointMode get Lines;
external SkPointMode get Polygon;
}
@JS()
@staticInterop
class SkPointMode {}
extension SkPointModeExtension on SkPointMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkPointMode> _skPointModes = <SkPointMode>[
canvasKit.PointMode.Points,
canvasKit.PointMode.Lines,
canvasKit.PointMode.Polygon,
];
SkPointMode toSkPointMode(ui.PointMode mode) {
return _skPointModes[mode.index];
}
@JS()
@staticInterop
class SkClipOpEnum {}
extension SkClipOpEnumExtension on SkClipOpEnum {
external SkClipOp get Difference;
external SkClipOp get Intersect;
}
@JS()
@staticInterop
class SkClipOp {}
extension SkClipOpExtension on SkClipOp {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkClipOp> _skClipOps = <SkClipOp>[
canvasKit.ClipOp.Difference,
canvasKit.ClipOp.Intersect,
];
SkClipOp toSkClipOp(ui.ClipOp clipOp) {
return _skClipOps[clipOp.index];
}
@JS()
@staticInterop
class SkFillTypeEnum {}
extension SkFillTypeEnumExtension on SkFillTypeEnum {
external SkFillType get Winding;
external SkFillType get EvenOdd;
}
@JS()
@staticInterop
class SkFillType {}
extension SkFillTypeExtension on SkFillType {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkFillType> _skFillTypes = <SkFillType>[
canvasKit.FillType.Winding,
canvasKit.FillType.EvenOdd,
];
SkFillType toSkFillType(ui.PathFillType fillType) {
return _skFillTypes[fillType.index];
}
@JS()
@staticInterop
class SkPathOpEnum {}
extension SkPathOpEnumExtension on SkPathOpEnum {
external SkPathOp get Difference;
external SkPathOp get Intersect;
external SkPathOp get Union;
external SkPathOp get XOR;
external SkPathOp get ReverseDifference;
}
@JS()
@staticInterop
class SkPathOp {}
extension SkPathOpExtension on SkPathOp {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkPathOp> _skPathOps = <SkPathOp>[
canvasKit.PathOp.Difference,
canvasKit.PathOp.Intersect,
canvasKit.PathOp.Union,
canvasKit.PathOp.XOR,
canvasKit.PathOp.ReverseDifference,
];
SkPathOp toSkPathOp(ui.PathOperation pathOp) {
return _skPathOps[pathOp.index];
}
@JS()
@staticInterop
class SkBlurStyleEnum {}
extension SkBlurStyleEnumExtension on SkBlurStyleEnum {
external SkBlurStyle get Normal;
external SkBlurStyle get Solid;
external SkBlurStyle get Outer;
external SkBlurStyle get Inner;
}
@JS()
@staticInterop
class SkBlurStyle {}
extension SkBlurStyleExtension on SkBlurStyle {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkBlurStyle> _skBlurStyles = <SkBlurStyle>[
canvasKit.BlurStyle.Normal,
canvasKit.BlurStyle.Solid,
canvasKit.BlurStyle.Outer,
canvasKit.BlurStyle.Inner,
];
SkBlurStyle toSkBlurStyle(ui.BlurStyle style) {
return _skBlurStyles[style.index];
}
@JS()
@staticInterop
class SkStrokeCapEnum {}
extension SkStrokeCapEnumExtension on SkStrokeCapEnum {
external SkStrokeCap get Butt;
external SkStrokeCap get Round;
external SkStrokeCap get Square;
}
@JS()
@staticInterop
class SkStrokeCap {}
extension SkStrokeCapExtension on SkStrokeCap {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkStrokeCap> _skStrokeCaps = <SkStrokeCap>[
canvasKit.StrokeCap.Butt,
canvasKit.StrokeCap.Round,
canvasKit.StrokeCap.Square,
];
SkStrokeCap toSkStrokeCap(ui.StrokeCap strokeCap) {
return _skStrokeCaps[strokeCap.index];
}
@JS()
@staticInterop
class SkPaintStyleEnum {}
extension SkPaintStyleEnumExtension on SkPaintStyleEnum {
external SkPaintStyle get Stroke;
external SkPaintStyle get Fill;
}
@JS()
@staticInterop
class SkPaintStyle {}
extension SkPaintStyleExtension on SkPaintStyle {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkPaintStyle> _skPaintStyles = <SkPaintStyle>[
canvasKit.PaintStyle.Fill,
canvasKit.PaintStyle.Stroke,
];
SkPaintStyle toSkPaintStyle(ui.PaintingStyle paintStyle) {
return _skPaintStyles[paintStyle.index];
}
@JS()
@staticInterop
class SkBlendModeEnum {}
extension SkBlendModeEnumExtension on SkBlendModeEnum {
external SkBlendMode get Clear;
external SkBlendMode get Src;
external SkBlendMode get Dst;
external SkBlendMode get SrcOver;
external SkBlendMode get DstOver;
external SkBlendMode get SrcIn;
external SkBlendMode get DstIn;
external SkBlendMode get SrcOut;
external SkBlendMode get DstOut;
external SkBlendMode get SrcATop;
external SkBlendMode get DstATop;
external SkBlendMode get Xor;
external SkBlendMode get Plus;
external SkBlendMode get Modulate;
external SkBlendMode get Screen;
external SkBlendMode get Overlay;
external SkBlendMode get Darken;
external SkBlendMode get Lighten;
external SkBlendMode get ColorDodge;
external SkBlendMode get ColorBurn;
external SkBlendMode get HardLight;
external SkBlendMode get SoftLight;
external SkBlendMode get Difference;
external SkBlendMode get Exclusion;
external SkBlendMode get Multiply;
external SkBlendMode get Hue;
external SkBlendMode get Saturation;
external SkBlendMode get Color;
external SkBlendMode get Luminosity;
}
@JS()
@staticInterop
class SkBlendMode {}
extension SkBlendModeExtension on SkBlendMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkBlendMode> _skBlendModes = <SkBlendMode>[
canvasKit.BlendMode.Clear,
canvasKit.BlendMode.Src,
canvasKit.BlendMode.Dst,
canvasKit.BlendMode.SrcOver,
canvasKit.BlendMode.DstOver,
canvasKit.BlendMode.SrcIn,
canvasKit.BlendMode.DstIn,
canvasKit.BlendMode.SrcOut,
canvasKit.BlendMode.DstOut,
canvasKit.BlendMode.SrcATop,
canvasKit.BlendMode.DstATop,
canvasKit.BlendMode.Xor,
canvasKit.BlendMode.Plus,
canvasKit.BlendMode.Modulate,
canvasKit.BlendMode.Screen,
canvasKit.BlendMode.Overlay,
canvasKit.BlendMode.Darken,
canvasKit.BlendMode.Lighten,
canvasKit.BlendMode.ColorDodge,
canvasKit.BlendMode.ColorBurn,
canvasKit.BlendMode.HardLight,
canvasKit.BlendMode.SoftLight,
canvasKit.BlendMode.Difference,
canvasKit.BlendMode.Exclusion,
canvasKit.BlendMode.Multiply,
canvasKit.BlendMode.Hue,
canvasKit.BlendMode.Saturation,
canvasKit.BlendMode.Color,
canvasKit.BlendMode.Luminosity,
];
SkBlendMode toSkBlendMode(ui.BlendMode blendMode) {
return _skBlendModes[blendMode.index];
}
@JS()
@staticInterop
class SkStrokeJoinEnum {}
extension SkStrokeJoinEnumExtension on SkStrokeJoinEnum {
external SkStrokeJoin get Miter;
external SkStrokeJoin get Round;
external SkStrokeJoin get Bevel;
}
@JS()
@staticInterop
class SkStrokeJoin {}
extension SkStrokeJoinExtension on SkStrokeJoin {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkStrokeJoin> _skStrokeJoins = <SkStrokeJoin>[
canvasKit.StrokeJoin.Miter,
canvasKit.StrokeJoin.Round,
canvasKit.StrokeJoin.Bevel,
];
SkStrokeJoin toSkStrokeJoin(ui.StrokeJoin strokeJoin) {
return _skStrokeJoins[strokeJoin.index];
}
@JS()
@staticInterop
class SkTileModeEnum {}
extension SkTileModeEnumExtension on SkTileModeEnum {
external SkTileMode get Clamp;
external SkTileMode get Repeat;
external SkTileMode get Mirror;
external SkTileMode get Decal;
}
@JS()
@staticInterop
class SkTileMode {}
extension SkTileModeExtension on SkTileMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkTileMode> _skTileModes = <SkTileMode>[
canvasKit.TileMode.Clamp,
canvasKit.TileMode.Repeat,
canvasKit.TileMode.Mirror,
canvasKit.TileMode.Decal,
];
SkTileMode toSkTileMode(ui.TileMode mode) {
return _skTileModes[mode.index];
}
@JS()
@staticInterop
class SkFilterModeEnum {}
extension SkFilterModeEnumExtension on SkFilterModeEnum {
external SkFilterMode get Nearest;
external SkFilterMode get Linear;
}
@JS()
@staticInterop
class SkFilterMode {}
extension SkFilterModeExtension on SkFilterMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
SkFilterMode toSkFilterMode(ui.FilterQuality filterQuality) {
return filterQuality == ui.FilterQuality.none
? canvasKit.FilterMode.Nearest
: canvasKit.FilterMode.Linear;
}
@JS()
@staticInterop
class SkMipmapModeEnum {}
extension SkMipmapModeEnumExtension on SkMipmapModeEnum {
external SkMipmapMode get None;
external SkMipmapMode get Nearest;
external SkMipmapMode get Linear;
}
@JS()
@staticInterop
class SkMipmapMode {}
extension SkMipmapModeExtension on SkMipmapMode {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
SkMipmapMode toSkMipmapMode(ui.FilterQuality filterQuality) {
return filterQuality == ui.FilterQuality.medium
? canvasKit.MipmapMode.Linear
: canvasKit.MipmapMode.None;
}
@JS()
@staticInterop
class SkAlphaTypeEnum {}
extension SkAlphaTypeEnumExtension on SkAlphaTypeEnum {
external SkAlphaType get Opaque;
external SkAlphaType get Premul;
external SkAlphaType get Unpremul;
}
@JS()
@staticInterop
class SkAlphaType {}
extension SkAlphaTypeExtension on SkAlphaType {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
@JS()
@staticInterop
class SkColorTypeEnum {}
extension SkColorTypeEnumExtension on SkColorTypeEnum {
external SkColorType get Alpha_8;
external SkColorType get RGB_565;
external SkColorType get ARGB_4444;
external SkColorType get RGBA_8888;
external SkColorType get RGB_888x;
external SkColorType get BGRA_8888;
external SkColorType get RGBA_1010102;
external SkColorType get RGB_101010x;
external SkColorType get Gray_8;
external SkColorType get RGBA_F16;
external SkColorType get RGBA_F32;
}
@JS()
@staticInterop
class SkColorType {}
extension SkColorTypeExtension on SkColorType {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
@JS()
@anonymous
@staticInterop
class SkAnimatedImage {}
extension SkAnimatedImageExtension on SkAnimatedImage {
@JS('getFrameCount')
external JSNumber _getFrameCount();
double getFrameCount() => _getFrameCount().toDartDouble;
@JS('getRepetitionCount')
external JSNumber _getRepetitionCount();
double getRepetitionCount() => _getRepetitionCount().toDartDouble;
/// Returns duration in milliseconds.
@JS('currentFrameDuration')
external JSNumber _currentFrameDuration();
double currentFrameDuration() => _currentFrameDuration().toDartDouble;
/// Advances to the next frame and returns its duration in milliseconds.
@JS('decodeNextFrame')
external JSNumber _decodeNextFrame();
double decodeNextFrame() => _decodeNextFrame().toDartDouble;
external SkImage makeImageAtCurrentFrame();
@JS('width')
external JSNumber _width();
double width() => _width().toDartDouble;
@JS('height')
external JSNumber _height();
double height() => _height().toDartDouble;
/// Deletes the C++ object.
///
/// This object is no longer usable after calling this method.
external JSVoid delete();
@JS('isDeleted')
external JSBoolean _isDeleted();
bool isDeleted() => _isDeleted().toDart;
}
@JS()
@anonymous
@staticInterop
class SkImage {}
extension SkImageExtension on SkImage {
external JSVoid delete();
@JS('width')
external JSNumber _width();
double width() => _width().toDartDouble;
@JS('height')
external JSNumber _height();
double height() => _height().toDartDouble;
@JS('makeShaderCubic')
external SkShader _makeShaderCubic(
SkTileMode tileModeX,
SkTileMode tileModeY,
JSNumber B,
JSNumber C,
JSFloat32Array? matrix, // 3x3 matrix
);
SkShader makeShaderCubic(
SkTileMode tileModeX,
SkTileMode tileModeY,
double B,
double C,
Float32List? matrix, // 3x3 matrix
) => _makeShaderCubic(tileModeX, tileModeY, B.toJS, C.toJS, matrix?.toJS);
@JS('makeShaderOptions')
external SkShader _makeShaderOptions(
SkTileMode tileModeX,
SkTileMode tileModeY,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
JSFloat32Array? matrix, // 3x3 matrix
);
SkShader makeShaderOptions(
SkTileMode tileModeX,
SkTileMode tileModeY,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
Float32List? matrix, // 3x3 matrix
) => _makeShaderOptions(tileModeX, tileModeY, filterMode, mipmapMode,
matrix?.toJS);
@JS('readPixels')
external JSUint8Array _readPixels(
JSNumber srcX, JSNumber srcY, SkImageInfo imageInfo);
Uint8List readPixels(double srcX, double srcY, SkImageInfo imageInfo) =>
_readPixels(srcX.toJS, srcY.toJS, imageInfo).toDart;
@JS('encodeToBytes')
external JSUint8Array? _encodeToBytes();
Uint8List? encodeToBytes() => _encodeToBytes()?.toDart;
@JS('isAliasOf')
external JSBoolean _isAliasOf(SkImage other);
bool isAliasOf(SkImage other) => _isAliasOf(other).toDart;
@JS('isDeleted')
external JSBoolean _isDeleted();
bool isDeleted() => _isDeleted().toDart;
}
@JS()
@staticInterop
class SkShaderNamespace {}
extension SkShaderNamespaceExtension on SkShaderNamespace {
@JS('MakeLinearGradient')
external SkShader _MakeLinearGradient(
JSFloat32Array from, // 2-element array
JSFloat32Array to, // 2-element array
JSUint32Array colors,
JSFloat32Array colorStops,
SkTileMode tileMode,
JSFloat32Array? matrix,
);
SkShader MakeLinearGradient(
Float32List from, // 2-element array
Float32List to, // 2-element array
Uint32List colors,
Float32List colorStops,
SkTileMode tileMode,
Float32List? matrix,
) => _MakeLinearGradient(from.toJS, to.toJS, colors.toJS, colorStops.toJS,
tileMode, matrix?.toJS);
@JS('MakeRadialGradient')
external SkShader _MakeRadialGradient(
JSFloat32Array center, // 2-element array
JSNumber radius,
JSUint32Array colors,
JSFloat32Array colorStops,
SkTileMode tileMode,
JSFloat32Array? matrix, // 3x3 matrix
JSNumber flags,
);
SkShader MakeRadialGradient(
Float32List center, // 2-element array
double radius,
Uint32List colors,
Float32List colorStops,
SkTileMode tileMode,
Float32List? matrix, // 3x3 matrix
double flags,
) => _MakeRadialGradient(center.toJS, radius.toJS, colors.toJS,
colorStops.toJS, tileMode, matrix?.toJS,
flags.toJS);
@JS('MakeTwoPointConicalGradient')
external SkShader _MakeTwoPointConicalGradient(
JSFloat32Array focal,
JSNumber focalRadius,
JSFloat32Array center,
JSNumber radius,
JSUint32Array colors,
JSFloat32Array colorStops,
SkTileMode tileMode,
JSFloat32Array? matrix, // 3x3 matrix
JSNumber flags,
);
SkShader MakeTwoPointConicalGradient(
Float32List focal,
double focalRadius,
Float32List center,
double radius,
Uint32List colors,
Float32List colorStops,
SkTileMode tileMode,
Float32List? matrix, // 3x3 matrix
double flags,
) => _MakeTwoPointConicalGradient(focal.toJS, focalRadius.toJS, center.toJS,
radius.toJS, colors.toJS, colorStops.toJS,
tileMode, matrix?.toJS, flags.toJS);
@JS('MakeSweepGradient')
external SkShader _MakeSweepGradient(
JSNumber cx,
JSNumber cy,
JSUint32Array colors,
JSFloat32Array colorStops,
SkTileMode tileMode,
JSFloat32Array? matrix, // 3x3 matrix
JSNumber flags,
JSNumber startAngle,
JSNumber endAngle,
);
SkShader MakeSweepGradient(
double cx,
double cy,
Uint32List colors,
Float32List colorStops,
SkTileMode tileMode,
Float32List? matrix, // 3x3 matrix
double flags,
double startAngle,
double endAngle,
) => _MakeSweepGradient(cx.toJS, cy.toJS, colors.toJS, colorStops.toJS,
tileMode, matrix?.toJS, flags.toJS, startAngle.toJS,
endAngle.toJS);
}
@JS()
@anonymous
@staticInterop
class SkShader {}
extension SkShaderExtension on SkShader {
external JSVoid delete();
}
@JS()
@staticInterop
class SkMaskFilterNamespace {}
extension SkMaskFilterNamespaceExtension on SkMaskFilterNamespace {
// Creates a blur MaskFilter.
//
// Returns `null` if [sigma] is 0 or infinite.
@JS('MakeBlur')
external SkMaskFilter? _MakeBlur(
SkBlurStyle blurStyle, JSNumber sigma, JSBoolean respectCTM);
SkMaskFilter? MakeBlur(
SkBlurStyle blurStyle, double sigma, bool respectCTM) =>
_MakeBlur(blurStyle, sigma.toJS, respectCTM.toJS);
}
// This needs to be bound to top-level because SkPaint is initialized
// with `new`. Also in Dart you can't write this:
//
// external SkPaint SkPaint();
@JS('window.flutterCanvasKit.Paint')
@staticInterop
class SkPaint {
external factory SkPaint();
}
extension SkPaintExtension on SkPaint {
external JSVoid setBlendMode(SkBlendMode blendMode);
external JSVoid setStyle(SkPaintStyle paintStyle);
@JS('setStrokeWidth')
external JSVoid _setStrokeWidth(JSNumber width);
JSVoid setStrokeWidth(double width) => _setStrokeWidth(width.toJS);
external JSVoid setStrokeCap(SkStrokeCap cap);
external JSVoid setStrokeJoin(SkStrokeJoin join);
@JS('setAntiAlias')
external JSVoid _setAntiAlias(JSBoolean isAntiAlias);
void setAntiAlias(bool isAntiAlias) => _setAntiAlias(isAntiAlias.toJS);
@JS('setColorInt')
external JSVoid _setColorInt(JSNumber color);
void setColorInt(double color) => _setColorInt(color.toJS);
external JSVoid setShader(SkShader? shader);
external JSVoid setMaskFilter(SkMaskFilter? maskFilter);
external JSVoid setColorFilter(SkColorFilter? colorFilter);
@JS('setStrokeMiter')
external JSVoid _setStrokeMiter(JSNumber miterLimit);
void setStrokeMiter(double miterLimit) => _setStrokeMiter(miterLimit.toJS);
external JSVoid setImageFilter(SkImageFilter? imageFilter);
external JSVoid delete();
}
@JS()
@anonymous
@staticInterop
abstract class CkFilterOptions {}
@JS()
@anonymous
@staticInterop
class _CkCubicFilterOptions extends CkFilterOptions {
external factory _CkCubicFilterOptions(
{required JSNumber B, required JSNumber C});
}
@JS()
@anonymous
@staticInterop
class _CkTransformFilterOptions extends CkFilterOptions {
external factory _CkTransformFilterOptions(
{SkFilterMode filter, SkMipmapMode mipmap});
}
final Map<ui.FilterQuality, CkFilterOptions> _filterOptions =
<ui.FilterQuality, CkFilterOptions>{
ui.FilterQuality.none: _CkTransformFilterOptions(
filter: canvasKit.FilterMode.Nearest,
mipmap: canvasKit.MipmapMode.None,
),
ui.FilterQuality.low: _CkTransformFilterOptions(
filter: canvasKit.FilterMode.Linear,
mipmap: canvasKit.MipmapMode.None,
),
ui.FilterQuality.medium: _CkTransformFilterOptions(
filter: canvasKit.FilterMode.Linear,
mipmap: canvasKit.MipmapMode.Linear,
),
ui.FilterQuality.high: _CkCubicFilterOptions(
B: (1.0 / 3).toJS,
C: (1.0 / 3).toJS,
),
};
CkFilterOptions toSkFilterOptions(ui.FilterQuality filterQuality) {
return _filterOptions[filterQuality]!;
}
@JS()
@anonymous
@staticInterop
class SkMaskFilter {}
extension SkMaskFilterExtension on SkMaskFilter {
external JSVoid delete();
}
@JS()
@staticInterop
class SkColorFilterNamespace {}
extension SkColorFilterNamespaceExtension on SkColorFilterNamespace {
@JS('MakeBlend')
external SkColorFilter? _MakeBlend(
JSFloat32Array color, SkBlendMode blendMode);
SkColorFilter? MakeBlend(Float32List color, SkBlendMode blendMode) =>
_MakeBlend(color.toJS, blendMode);
@JS('MakeMatrix')
external SkColorFilter _MakeMatrix(
JSFloat32Array matrix, // 20-element matrix
);
SkColorFilter MakeMatrix(
Float32List matrix, // 20-element matrix
) => _MakeMatrix(matrix.toJS);
external SkColorFilter MakeLinearToSRGBGamma();
external SkColorFilter MakeSRGBToLinearGamma();
external SkColorFilter MakeCompose(SkColorFilter? outer, SkColorFilter inner);
}
@JS()
@anonymous
@staticInterop
class SkColorFilter {}
extension SkColorFilterExtension on SkColorFilter {
external JSVoid delete();
}
@JS()
@staticInterop
class SkImageFilterNamespace {}
extension SkImageFilterNamespaceExtension on SkImageFilterNamespace {
@JS('MakeBlur')
external SkImageFilter _MakeBlur(
JSNumber sigmaX,
JSNumber sigmaY,
SkTileMode tileMode,
JSVoid input, // we don't use this yet
);
SkImageFilter MakeBlur(
double sigmaX,
double sigmaY,
SkTileMode tileMode,
void input, // we don't use this yet
) => _MakeBlur(sigmaX.toJS, sigmaY.toJS, tileMode, input);
@JS('MakeMatrixTransform')
external SkImageFilter _MakeMatrixTransform(
JSFloat32Array matrix, // 3x3 matrix
CkFilterOptions filterOptions,
JSVoid input, // we don't use this yet
);
SkImageFilter MakeMatrixTransform(
Float32List matrix, // 3x3 matrix
CkFilterOptions filterOptions,
void input, // we don't use this yet
) => _MakeMatrixTransform(matrix.toJS, filterOptions, input);
external SkImageFilter MakeColorFilter(
SkColorFilter colorFilter,
JSVoid input, // we don't use this yet
);
external SkImageFilter MakeCompose(
SkImageFilter outer,
SkImageFilter inner,
);
}
@JS()
@anonymous
@staticInterop
class SkImageFilter {}
extension SkImageFilterExtension on SkImageFilter {
external JSVoid delete();
@JS('getOutputBounds')
external JSInt32Array _getOutputBounds(JSFloat32Array bounds);
Int32List getOutputBounds(Float32List bounds) =>
_getOutputBounds(bounds.toJS).toDart;
}
@JS()
@staticInterop
class SkPathNamespace {}
extension SkPathNamespaceExtension on SkPathNamespace {
/// Creates an [SkPath] using commands obtained from [SkPath.toCmds].
@JS('MakeFromCmds')
external SkPath _MakeFromCmds(JSAny pathCommands);
SkPath MakeFromCmds(List<dynamic> pathCommands) =>
_MakeFromCmds(pathCommands.toJSAnyShallow);
/// Creates an [SkPath] by combining [path1] and [path2] using [pathOp].
external SkPath MakeFromOp(SkPath path1, SkPath path2, SkPathOp pathOp);
}
/// Converts a 4x4 Flutter matrix (represented as a [Float32List] in
/// column major order) to an SkM44 which is a 4x4 matrix represented
/// as a [Float32List] in row major order.
Float32List toSkM44FromFloat32(Float32List matrix4) {
final Float32List skM44 = Float32List(16);
for (int r = 0; r < 4; r++) {
for (int c = 0; c < 4; c++) {
skM44[c * 4 + r] = matrix4[r * 4 + c];
}
}
return skM44;
}
// Mappings from SkMatrix-index to input-index.
const List<int> _skMatrixIndexToMatrix4Index = <int>[
0, 4, 12, // Row 1
1, 5, 13, // Row 2
3, 7, 15, // Row 3
];
/// Converts a 4x4 Flutter matrix (represented as a [Float32List]) to an
/// SkMatrix, which is a 3x3 transform matrix.
Float32List toSkMatrixFromFloat32(Float32List matrix4) {
final Float32List skMatrix = Float32List(9);
for (int i = 0; i < 9; ++i) {
final int matrix4Index = _skMatrixIndexToMatrix4Index[i];
if (matrix4Index < matrix4.length) {
skMatrix[i] = matrix4[matrix4Index];
} else {
skMatrix[i] = 0.0;
}
}
return skMatrix;
}
/// Converts a 4x4 Flutter matrix (represented as a [Float32List]) to an
/// SkMatrix, which is a 3x3 transform matrix.
Float32List toSkMatrixFromFloat64(Float64List matrix4) {
final Float32List skMatrix = Float32List(9);
for (int i = 0; i < 9; ++i) {
final int matrix4Index = _skMatrixIndexToMatrix4Index[i];
if (matrix4Index < matrix4.length) {
skMatrix[i] = matrix4[matrix4Index];
} else {
skMatrix[i] = 0.0;
}
}
return skMatrix;
}
/// Converts an [offset] into an `[x, y]` pair stored in a `Float32List`.
///
/// The returned list can be passed to CanvasKit APIs that take points.
Float32List toSkPoint(ui.Offset offset) {
final Float32List point = Float32List(2);
point[0] = offset.dx;
point[1] = offset.dy;
return point;
}
/// Color stops used when the framework specifies `null`.
final Float32List _kDefaultSkColorStops = Float32List(2)
..[0] = 0
..[1] = 1;
/// Converts a list of color stops into a Skia-compatible JS array of color stops.
///
/// In Flutter `null` means two color stops `[0, 1]` that in Skia must be specified explicitly.
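///
/// A minimal sketch of the `null` case:
///
/// ```dart
/// final Float32List stops = toSkColorStops(null);
/// assert(stops[0] == 0 && stops[1] == 1);
/// ```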
Float32List toSkColorStops(List<double>? colorStops) {
if (colorStops == null) {
return _kDefaultSkColorStops;
}
final int len = colorStops.length;
final Float32List skColorStops = Float32List(len);
for (int i = 0; i < len; i++) {
skColorStops[i] = colorStops[i];
}
return skColorStops;
}
@JS()
@staticInterop
abstract class _NativeType {}
@JS('Float32Array')
external _NativeType get _nativeFloat32ArrayType;
@JS('Uint32Array')
external _NativeType get _nativeUint32ArrayType;
@JS('window.flutterCanvasKit.Malloc')
external JSAny _malloc(_NativeType nativeType, JSNumber length);
/// Allocates a [Float32List] of [length] elements, backed by WASM memory,
/// managed by a [SkFloat32List].
///
/// To free the allocated array use [free].
SkFloat32List mallocFloat32List(int length) {
return _malloc(_nativeFloat32ArrayType, length.toJS) as SkFloat32List;
}
/// Allocates a [Uint32List] of [length] elements, backed by WASM memory,
/// managed by a [SkUint32List].
///
/// To free the allocated array use [free].
SkUint32List mallocUint32List(int length) {
return _malloc(_nativeUint32ArrayType, length.toJS) as SkUint32List;
}
/// Frees the WASM memory occupied by a [SkFloat32List] or [SkUint32List].
///
/// The [list] is no longer usable after calling this function.
///
/// Use this function to free lists owned by the engine.
@JS('window.flutterCanvasKit.Free')
external JSVoid free(MallocObj list);
@JS()
@staticInterop
abstract class MallocObj {}
/// Wraps a [Float32List] backed by WASM memory.
///
/// This wrapper is necessary because the raw [Float32List] will get detached
/// when WASM grows its memory. Call [toTypedArray] to get a new instance
/// that's attached to the current WASM memory block.
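///
/// A minimal usage sketch (the CanvasKit call that consumes the array is
/// elided):
///
/// ```dart
/// final SkFloat32List skPoints = mallocFloat32List(2);
/// // Fetch the typed array right before use; an earlier reference may have
/// // been detached by WASM memory growth.
/// final Float32List points = skPoints.toTypedArray();
/// points[0] = 10;
/// points[1] = 20;
/// // ... pass `points` to CanvasKit here ...
/// free(skPoints);
/// ```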
@JS()
@staticInterop
class SkFloat32List extends MallocObj {}
extension SkFloat32ListExtension on SkFloat32List {
/// The number of objects this pointer refers to.
@JS('length')
external JSNumber get _length;
double get length => _length.toDartDouble;
@JS('length')
external set _length(JSNumber length);
set length(double l) => _length = l.toJS;
/// Returns the [Float32List] object backed by WASM memory.
///
/// Do not reuse the returned array across multiple WASM function/method
/// invocations that may cause WASM memory to grow. When WASM memory
/// grows, the returned [Float32List] object becomes "detached" and is no
/// longer usable. Instead, call this method every time you need to read from
/// or write to the list.
@JS('toTypedArray')
external JSFloat32Array _toTypedArray();
Float32List toTypedArray() => _toTypedArray().toDart;
}
/// Wraps a [Uint32List] backed by WASM memory.
///
/// This wrapper is necessary because the raw [Uint32List] will get detached
/// when WASM grows its memory. Call [toTypedArray] to get a new instance
/// that's attached to the current WASM memory block.
@JS()
@staticInterop
class SkUint32List extends MallocObj {}
extension SkUint32ListExtension on SkUint32List {
/// The number of objects this pointer refers to.
@JS('length')
external JSNumber get _length;
double get length => _length.toDartDouble;
@JS('length')
external set _length(JSNumber length);
set length(double l) => _length = l.toJS;
/// Returns the [Uint32List] object backed by WASM memory.
///
/// Do not reuse the returned array across multiple WASM function/method
/// invocations that may cause WASM memory to grow. When WASM memory
/// grows, the returned [Uint32List] object becomes "detached" and is no
/// longer usable. Instead, call this method every time you need to read from
/// or write to the list.
@JS('toTypedArray')
external JSUint32Array _toTypedArray();
Uint32List toTypedArray() => _toTypedArray().toDart;
}
/// Writes [color] information into the given [skColor] buffer.
Float32List _populateSkColor(SkFloat32List skColor, ui.Color color) {
final Float32List array = skColor.toTypedArray();
array[0] = color.red / 255.0;
array[1] = color.green / 255.0;
array[2] = color.blue / 255.0;
array[3] = color.alpha / 255.0;
return array;
}
/// Unpacks the [color] into CanvasKit-compatible representation stored
/// in a shared memory location #1.
///
/// Use this only for passing transient data to CanvasKit. Because the
/// memory is shared the value will not persist.
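///
/// A hedged usage sketch (the consuming CanvasKit call is elided):
///
/// ```dart
/// // Use the shared view immediately; the next call overwrites it.
/// final Float32List skColor = toSharedSkColor1(const ui.Color(0xFF112233));
/// // ... pass `skColor` to CanvasKit before converting another color ...
/// ```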
Float32List toSharedSkColor1(ui.Color color) {
return _populateSkColor(_sharedSkColor1, color);
}
final SkFloat32List _sharedSkColor1 = mallocFloat32List(4);
/// Unpacks the [color] into CanvasKit-compatible representation stored
/// in a shared memory location #2.
///
/// Use this only for passing transient data to CanvasKit. Because the
/// memory is shared the value will not persist.
Float32List toSharedSkColor2(ui.Color color) {
return _populateSkColor(_sharedSkColor2, color);
}
final SkFloat32List _sharedSkColor2 = mallocFloat32List(4);
/// Unpacks the [color] into CanvasKit-compatible representation stored
/// in a shared memory location #3.
///
/// Use this only for passing transient data to CanvasKit. Because the
/// memory is shared the value will not persist.
Float32List toSharedSkColor3(ui.Color color) {
return _populateSkColor(_sharedSkColor3, color);
}
final SkFloat32List _sharedSkColor3 = mallocFloat32List(4);
@JS('window.flutterCanvasKit.Path')
@staticInterop
class SkPath {
external factory SkPath();
external factory SkPath.from(SkPath other);
}
extension SkPathExtension on SkPath {
external JSVoid setFillType(SkFillType fillType);
@JS('addArc')
external JSVoid _addArc(
JSFloat32Array oval,
JSNumber startAngleDegrees,
JSNumber sweepAngleDegrees,
);
void addArc(
Float32List oval,
double startAngleDegrees,
double sweepAngleDegrees,
) => _addArc(oval.toJS, startAngleDegrees.toJS, sweepAngleDegrees.toJS);
@JS('addOval')
external JSVoid _addOval(
JSFloat32Array oval,
JSBoolean counterClockWise,
JSNumber startIndex,
);
void addOval(
Float32List oval,
bool counterClockWise,
double startIndex,
) => _addOval(oval.toJS, counterClockWise.toJS, startIndex.toJS);
@JS('addPath')
external JSVoid _addPath(
SkPath other,
JSNumber scaleX,
JSNumber skewX,
JSNumber transX,
JSNumber skewY,
JSNumber scaleY,
JSNumber transY,
JSNumber pers0,
JSNumber pers1,
JSNumber pers2,
JSBoolean extendPath,
);
void addPath(
SkPath other,
double scaleX,
double skewX,
double transX,
double skewY,
double scaleY,
double transY,
double pers0,
double pers1,
double pers2,
bool extendPath,
) => _addPath(other, scaleX.toJS, skewX.toJS, transX.toJS, skewY.toJS,
scaleY.toJS, transY.toJS, pers0.toJS, pers1.toJS, pers2.toJS,
extendPath.toJS);
@JS('addPoly')
external JSVoid _addPoly(
JSFloat32Array points,
JSBoolean close,
);
void addPoly(
Float32List points,
bool close,
) => _addPoly(points.toJS, close.toJS);
@JS('addRRect')
external JSVoid _addRRect(
JSFloat32Array rrect,
JSBoolean counterClockWise,
);
void addRRect(
Float32List rrect,
bool counterClockWise,
) => _addRRect(rrect.toJS, counterClockWise.toJS);
@JS('addRect')
external JSVoid _addRect(
JSFloat32Array rect,
);
void addRect(
Float32List rect,
) => _addRect(rect.toJS);
@JS('arcToOval')
external JSVoid _arcToOval(
JSFloat32Array oval,
JSNumber startAngleDegrees,
JSNumber sweepAngleDegrees,
JSBoolean forceMoveTo,
);
void arcToOval(
Float32List oval,
double startAngleDegrees,
double sweepAngleDegrees,
bool forceMoveTo,
) => _arcToOval(oval.toJS, startAngleDegrees.toJS, sweepAngleDegrees.toJS,
forceMoveTo.toJS);
@JS('arcToRotated')
external JSVoid _arcToRotated(
JSNumber radiusX,
JSNumber radiusY,
JSNumber rotation,
JSBoolean useSmallArc,
JSBoolean counterClockWise,
JSNumber x,
JSNumber y,
);
void arcToRotated(
double radiusX,
double radiusY,
double rotation,
bool useSmallArc,
bool counterClockWise,
double x,
double y,
) => _arcToRotated(radiusX.toJS, radiusY.toJS, rotation.toJS,
useSmallArc.toJS, counterClockWise.toJS,
x.toJS, y.toJS);
external JSVoid close();
@JS('conicTo')
external JSVoid _conicTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
JSNumber w,
);
void conicTo(
double x1,
double y1,
double x2,
double y2,
double w,
) => _conicTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS, w.toJS);
@JS('contains')
external JSBoolean _contains(
JSNumber x,
JSNumber y,
);
bool contains(
double x,
double y,
) => _contains(x.toJS, y.toJS).toDart;
@JS('cubicTo')
external JSVoid _cubicTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
JSNumber x3,
JSNumber y3,
);
void cubicTo(
double x1,
double y1,
double x2,
double y2,
double x3,
double y3,
) => _cubicTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS, x3.toJS, y3.toJS);
@JS('getBounds')
external JSFloat32Array _getBounds();
Float32List getBounds() => _getBounds().toDart;
@JS('lineTo')
external JSVoid _lineTo(JSNumber x, JSNumber y);
void lineTo(double x, double y) => _lineTo(x.toJS, y.toJS);
@JS('moveTo')
external JSVoid _moveTo(JSNumber x, JSNumber y);
void moveTo(double x, double y) => _moveTo(x.toJS, y.toJS);
@JS('quadTo')
external JSVoid _quadTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
);
void quadTo(
double x1,
double y1,
double x2,
double y2,
) => _quadTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS);
@JS('rArcTo')
external JSVoid _rArcTo(
JSNumber x,
JSNumber y,
JSNumber rotation,
JSBoolean useSmallArc,
JSBoolean counterClockWise,
JSNumber deltaX,
JSNumber deltaY,
);
void rArcTo(
double x,
double y,
double rotation,
bool useSmallArc,
bool counterClockWise,
double deltaX,
double deltaY,
) => _rArcTo(x.toJS, y.toJS, rotation.toJS, useSmallArc.toJS,
counterClockWise.toJS, deltaX.toJS, deltaY.toJS);
@JS('rConicTo')
external JSVoid _rConicTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
JSNumber w,
);
void rConicTo(
double x1,
double y1,
double x2,
double y2,
double w,
) => _rConicTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS, w.toJS);
@JS('rCubicTo')
external JSVoid _rCubicTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
JSNumber x3,
JSNumber y3,
);
void rCubicTo(
double x1,
double y1,
double x2,
double y2,
double x3,
double y3,
) => _rCubicTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS, x3.toJS, y3.toJS);
@JS('rLineTo')
external JSVoid _rLineTo(JSNumber x, JSNumber y);
void rLineTo(double x, double y) => _rLineTo(x.toJS, y.toJS);
@JS('rMoveTo')
external JSVoid _rMoveTo(JSNumber x, JSNumber y);
void rMoveTo(double x, double y) => _rMoveTo(x.toJS, y.toJS);
@JS('rQuadTo')
external JSVoid _rQuadTo(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
);
void rQuadTo(
double x1,
double y1,
double x2,
double y2,
) => _rQuadTo(x1.toJS, y1.toJS, x2.toJS, y2.toJS);
external JSVoid reset();
@JS('toSVGString')
external JSString _toSVGString();
String toSVGString() => _toSVGString().toDart;
@JS('isEmpty')
external JSBoolean _isEmpty();
bool isEmpty() => _isEmpty().toDart;
external SkPath copy();
@JS('transform')
external JSVoid _transform(
JSNumber scaleX,
JSNumber skewX,
JSNumber transX,
JSNumber skewY,
JSNumber scaleY,
JSNumber transY,
JSNumber pers0,
JSNumber pers1,
JSNumber pers2,
);
void transform(
double scaleX,
double skewX,
double transX,
double skewY,
double scaleY,
double transY,
double pers0,
double pers1,
double pers2,
) => _transform(scaleX.toJS, skewX.toJS, transX.toJS,
skewY.toJS, scaleY.toJS, transY.toJS,
pers0.toJS, pers1.toJS, pers2.toJS);
/// Serializes the path into a list of commands.
///
/// The list can be used to create a new [SkPath] using
/// [CanvasKit.Path.MakeFromCmds].
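///
/// A hedged round-trip sketch (assuming `path` is an existing [SkPath] and
/// that the path namespace is exposed on the CanvasKit binding as
/// `canvasKit.Path`):
///
/// ```dart
/// final List<dynamic> cmds = path.toCmds();
/// final SkPath rebuilt = canvasKit.Path.MakeFromCmds(cmds);
/// ```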
@JS('toCmds')
external JSAny _toCmds();
List<dynamic> toCmds() => _toCmds().toObjectShallow as List<dynamic>;
external JSVoid delete();
}
@JS('window.flutterCanvasKit.ContourMeasureIter')
@staticInterop
class SkContourMeasureIter {
factory SkContourMeasureIter(
SkPath path,
bool forceClosed,
double resScale) => SkContourMeasureIter._(path, forceClosed.toJS,
resScale.toJS);
external factory SkContourMeasureIter._(
SkPath path,
JSBoolean forceClosed,
JSNumber resScale);
}
extension SkContourMeasureIterExtension on SkContourMeasureIter {
external SkContourMeasure? next();
external JSVoid delete();
}
@JS()
@staticInterop
class SkContourMeasure {}
extension SkContourMeasureExtension on SkContourMeasure {
@JS('getSegment')
external SkPath _getSegment(
JSNumber start, JSNumber end, JSBoolean startWithMoveTo);
SkPath getSegment(double start, double end, bool startWithMoveTo) =>
_getSegment(start.toJS, end.toJS, startWithMoveTo.toJS);
@JS('getPosTan')
external JSFloat32Array _getPosTan(JSNumber distance);
Float32List getPosTan(double distance) =>
_getPosTan(distance.toJS).toDart;
@JS('isClosed')
external JSBoolean _isClosed();
bool isClosed() => _isClosed().toDart;
@JS('length')
external JSNumber _length();
double length() => _length().toDartDouble;
external JSVoid delete();
}
// TODO(hterkelsen): Use a shared malloc'ed array for performance.
Float32List toSkRect(ui.Rect rect) {
final Float32List skRect = Float32List(4);
skRect[0] = rect.left;
skRect[1] = rect.top;
skRect[2] = rect.right;
skRect[3] = rect.bottom;
return skRect;
}
ui.Rect fromSkRect(Float32List skRect) {
return ui.Rect.fromLTRB(skRect[0], skRect[1], skRect[2], skRect[3]);
}
ui.Rect rectFromSkIRect(Int32List skIRect) {
return ui.Rect.fromLTRB(
skIRect[0].toDouble(),
skIRect[1].toDouble(),
skIRect[2].toDouble(),
skIRect[3].toDouble(),
);
}
// TODO(hterkelsen): Use a shared malloc'ed array for performance.
Float32List toSkRRect(ui.RRect rrect) {
final Float32List skRRect = Float32List(12);
skRRect[0] = rrect.left;
skRRect[1] = rrect.top;
skRRect[2] = rrect.right;
skRRect[3] = rrect.bottom;
skRRect[4] = rrect.tlRadiusX;
skRRect[5] = rrect.tlRadiusY;
skRRect[6] = rrect.trRadiusX;
skRRect[7] = rrect.trRadiusY;
skRRect[8] = rrect.brRadiusX;
skRRect[9] = rrect.brRadiusY;
skRRect[10] = rrect.blRadiusX;
skRRect[11] = rrect.blRadiusY;
return skRRect;
}
// TODO(hterkelsen): Use a shared malloc'ed array for performance.
Float32List toOuterSkRect(ui.RRect rrect) {
final Float32List skRect = Float32List(4);
skRect[0] = rrect.left;
skRect[1] = rrect.top;
skRect[2] = rrect.right;
skRect[3] = rrect.bottom;
return skRect;
}
/// Encodes a list of offsets into a CanvasKit-compatible point array.
///
/// Uses `CanvasKit.Malloc` to allocate storage for the points in the WASM
/// memory to avoid unnecessary copying. Unless CanvasKit takes ownership of
/// the list, the returned list must be explicitly freed using
/// [free].
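///
/// A minimal sketch of the allocate/use/free pattern (the consuming
/// CanvasKit call is elided):
///
/// ```dart
/// final SkFloat32List skPoints = toMallocedSkPoints(const <ui.Offset>[
///   ui.Offset.zero,
///   ui.Offset(10, 10),
/// ]);
/// // ... hand skPoints.toTypedArray() to CanvasKit here ...
/// free(skPoints);
/// ```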
SkFloat32List toMallocedSkPoints(List<ui.Offset> points) {
final int len = points.length;
final SkFloat32List skPoints = mallocFloat32List(len * 2);
final Float32List list = skPoints.toTypedArray();
for (int i = 0; i < len; i++) {
list[2 * i] = points[i].dx;
list[2 * i + 1] = points[i].dy;
}
return skPoints;
}
/// Converts a list of [ui.Offset] into a flat list of points.
Float32List toFlatSkPoints(List<ui.Offset> points) {
final int len = points.length;
final Float32List result = Float32List(len * 2);
for (int i = 0; i < len; i++) {
result[2 * i] = points[i].dx;
result[2 * i + 1] = points[i].dy;
}
return result;
}
/// Converts a list of [ui.Color] into a flat list of ints.
Uint32List toFlatColors(List<ui.Color> colors) {
final int len = colors.length;
final Uint32List result = Uint32List(len);
for (int i = 0; i < len; i++) {
result[i] = colors[i].value;
}
return result;
}
Uint16List toUint16List(List<int> ints) {
final int len = ints.length;
final Uint16List result = Uint16List(len);
for (int i = 0; i < len; i++) {
result[i] = ints[i];
}
return result;
}
@JS('window.flutterCanvasKit.PictureRecorder')
@staticInterop
class SkPictureRecorder {
external factory SkPictureRecorder();
}
extension SkPictureRecorderExtension on SkPictureRecorder {
@JS('beginRecording')
external SkCanvas _beginRecording(
JSFloat32Array bounds, JSBoolean computeBounds);
SkCanvas beginRecording(Float32List bounds) =>
_beginRecording(bounds.toJS, true.toJS);
external SkPicture finishRecordingAsPicture();
external JSVoid delete();
}
/// We do not use the `delete` method (which may be removed in the future anyway).
///
/// By Skia coding convention raw pointers should always be treated as
/// "borrowed", i.e. their memory is managed by other objects. In the case of
/// [SkCanvas] it is managed by [SkPictureRecorder].
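///
/// A hedged sketch of the expected lifecycle (`paint` is assumed to be an
/// existing [SkPaint]):
///
/// ```dart
/// final SkPictureRecorder recorder = SkPictureRecorder();
/// final SkCanvas canvas = recorder.beginRecording(toSkRect(ui.Rect.largest));
/// canvas.drawPaint(paint);
/// final SkPicture picture = recorder.finishRecordingAsPicture();
/// recorder.delete(); // The canvas itself is never deleted by us.
/// ```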
@JS()
@anonymous
@staticInterop
class SkCanvas {}
extension SkCanvasExtension on SkCanvas {
@JS('clear')
external JSVoid _clear(JSFloat32Array color);
void clear(Float32List color) => _clear(color.toJS);
@JS('clipPath')
external JSVoid _clipPath(
SkPath path,
SkClipOp clipOp,
JSBoolean doAntiAlias,
);
void clipPath(
SkPath path,
SkClipOp clipOp,
bool doAntiAlias,
) => _clipPath(path, clipOp, doAntiAlias.toJS);
@JS('clipRRect')
external JSVoid _clipRRect(
JSFloat32Array rrect,
SkClipOp clipOp,
JSBoolean doAntiAlias,
);
void clipRRect(
Float32List rrect,
SkClipOp clipOp,
bool doAntiAlias,
) => _clipRRect(rrect.toJS, clipOp, doAntiAlias.toJS);
@JS('clipRect')
external JSVoid _clipRect(
JSFloat32Array rect,
SkClipOp clipOp,
JSBoolean doAntiAlias,
);
void clipRect(
Float32List rect,
SkClipOp clipOp,
bool doAntiAlias,
) => _clipRect(rect.toJS, clipOp, doAntiAlias.toJS);
@JS('getDeviceClipBounds')
external JSInt32Array _getDeviceClipBounds();
Int32List getDeviceClipBounds() => _getDeviceClipBounds().toDart;
@JS('drawArc')
external JSVoid _drawArc(
JSFloat32Array oval,
JSNumber startAngleDegrees,
JSNumber sweepAngleDegrees,
JSBoolean useCenter,
SkPaint paint,
);
void drawArc(
Float32List oval,
double startAngleDegrees,
double sweepAngleDegrees,
bool useCenter,
SkPaint paint,
) => _drawArc(oval.toJS, startAngleDegrees.toJS, sweepAngleDegrees.toJS,
useCenter.toJS, paint);
@JS('drawAtlas')
external JSVoid _drawAtlas(
SkImage image,
JSFloat32Array rects,
JSFloat32Array rstTransforms,
SkPaint paint,
SkBlendMode blendMode,
JSUint32Array? colors,
);
void drawAtlas(
SkImage image,
Float32List rects,
Float32List rstTransforms,
SkPaint paint,
SkBlendMode blendMode,
Uint32List? colors,
) => _drawAtlas(image, rects.toJS, rstTransforms.toJS, paint,
blendMode, colors?.toJS);
@JS('drawCircle')
external JSVoid _drawCircle(
JSNumber x,
JSNumber y,
JSNumber radius,
SkPaint paint,
);
void drawCircle(
double x,
double y,
double radius,
SkPaint paint,
) => _drawCircle(x.toJS, y.toJS, radius.toJS, paint);
@JS('drawColorInt')
external JSVoid _drawColorInt(
JSNumber color,
SkBlendMode blendMode,
);
void drawColorInt(
double color,
SkBlendMode blendMode,
) => _drawColorInt(color.toJS, blendMode);
@JS('drawDRRect')
external JSVoid _drawDRRect(
JSFloat32Array outer,
JSFloat32Array inner,
SkPaint paint,
);
void drawDRRect(
Float32List outer,
Float32List inner,
SkPaint paint,
) => _drawDRRect(outer.toJS, inner.toJS, paint);
@JS('drawImageCubic')
external JSVoid _drawImageCubic(
SkImage image,
JSNumber x,
JSNumber y,
JSNumber B,
JSNumber C,
SkPaint paint,
);
void drawImageCubic(
SkImage image,
double x,
double y,
double B,
double C,
SkPaint paint,
) => _drawImageCubic(image, x.toJS, y.toJS, B.toJS, C.toJS, paint);
@JS('drawImageOptions')
external JSVoid _drawImageOptions(
SkImage image,
JSNumber x,
JSNumber y,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
SkPaint paint,
);
void drawImageOptions(
SkImage image,
double x,
double y,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
SkPaint paint,
) => _drawImageOptions(image, x.toJS, y.toJS, filterMode, mipmapMode, paint);
@JS('drawImageRectCubic')
external JSVoid _drawImageRectCubic(
SkImage image,
JSFloat32Array src,
JSFloat32Array dst,
JSNumber B,
JSNumber C,
SkPaint paint,
);
void drawImageRectCubic(
SkImage image,
Float32List src,
Float32List dst,
double B,
double C,
SkPaint paint,
) => _drawImageRectCubic(image, src.toJS, dst.toJS, B.toJS, C.toJS, paint);
@JS('drawImageRectOptions')
external JSVoid _drawImageRectOptions(
SkImage image,
JSFloat32Array src,
JSFloat32Array dst,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
SkPaint paint,
);
void drawImageRectOptions(
SkImage image,
Float32List src,
Float32List dst,
SkFilterMode filterMode,
SkMipmapMode mipmapMode,
SkPaint paint,
) => _drawImageRectOptions(image, src.toJS, dst.toJS, filterMode, mipmapMode,
paint);
@JS('drawImageNine')
external JSVoid _drawImageNine(
SkImage image,
JSFloat32Array center,
JSFloat32Array dst,
SkFilterMode filterMode,
SkPaint paint,
);
void drawImageNine(
SkImage image,
Float32List center,
Float32List dst,
SkFilterMode filterMode,
SkPaint paint,
) => _drawImageNine(image, center.toJS, dst.toJS, filterMode, paint);
@JS('drawLine')
external JSVoid _drawLine(
JSNumber x1,
JSNumber y1,
JSNumber x2,
JSNumber y2,
SkPaint paint,
);
void drawLine(
double x1,
double y1,
double x2,
double y2,
SkPaint paint,
) => _drawLine(x1.toJS, y1.toJS, x2.toJS, y2.toJS, paint);
@JS('drawOval')
external JSVoid _drawOval(
JSFloat32Array rect,
SkPaint paint,
);
void drawOval(
Float32List rect,
SkPaint paint,
) => _drawOval(rect.toJS, paint);
external JSVoid drawPaint(
SkPaint paint,
);
external JSVoid drawPath(
SkPath path,
SkPaint paint,
);
@JS('drawPoints')
external JSVoid _drawPoints(
SkPointMode pointMode,
JSFloat32Array points,
SkPaint paint,
);
void drawPoints(
SkPointMode pointMode,
Float32List points,
SkPaint paint,
) => _drawPoints(pointMode, points.toJS, paint);
@JS('drawRRect')
external JSVoid _drawRRect(
JSFloat32Array rrect,
SkPaint paint,
);
void drawRRect(
Float32List rrect,
SkPaint paint,
) => _drawRRect(rrect.toJS, paint);
@JS('drawRect')
external JSVoid _drawRect(
JSFloat32Array rect,
SkPaint paint,
);
void drawRect(
Float32List rect,
SkPaint paint,
) => _drawRect(rect.toJS, paint);
@JS('drawShadow')
external JSVoid _drawShadow(
SkPath path,
JSFloat32Array zPlaneParams,
JSFloat32Array lightPos,
JSNumber lightRadius,
JSFloat32Array ambientColor,
JSFloat32Array spotColor,
JSNumber flags,
);
void drawShadow(
SkPath path,
Float32List zPlaneParams,
Float32List lightPos,
double lightRadius,
Float32List ambientColor,
Float32List spotColor,
double flags,
) => _drawShadow(path, zPlaneParams.toJS, lightPos.toJS, lightRadius.toJS,
ambientColor.toJS, spotColor.toJS, flags.toJS);
external JSVoid drawVertices(
SkVertices vertices,
SkBlendMode blendMode,
SkPaint paint,
);
@JS('save')
external JSNumber _save();
double save() => _save().toDartDouble;
@JS('getSaveCount')
external JSNumber _getSaveCount();
double getSaveCount() => _getSaveCount().toDartDouble;
@JS('saveLayer')
external JSVoid _saveLayer(
SkPaint? paint,
JSFloat32Array? bounds,
SkImageFilter? backdrop,
JSNumber? flags,
);
void saveLayer(
SkPaint? paint,
Float32List? bounds,
SkImageFilter? backdrop,
int? flags,
) => _saveLayer(paint, bounds?.toJS, backdrop, flags?.toJS);
external JSVoid restore();
@JS('restoreToCount')
external JSVoid _restoreToCount(JSNumber count);
void restoreToCount(double count) => _restoreToCount(count.toJS);
@JS('rotate')
external JSVoid _rotate(
JSNumber angleDegrees,
JSNumber px,
JSNumber py,
);
void rotate(
double angleDegrees,
double px,
double py,
) => _rotate(angleDegrees.toJS, px.toJS, py.toJS);
@JS('scale')
external JSVoid _scale(JSNumber x, JSNumber y);
void scale(double x, double y) => _scale(x.toJS, y.toJS);
@JS('skew')
external JSVoid _skew(JSNumber x, JSNumber y);
void skew(double x, double y) => _skew(x.toJS, y.toJS);
@JS('concat')
external JSVoid _concat(JSFloat32Array matrix);
void concat(Float32List matrix) => _concat(matrix.toJS);
@JS('translate')
external JSVoid _translate(JSNumber x, JSNumber y);
void translate(double x, double y) => _translate(x.toJS, y.toJS);
@JS('getLocalToDevice')
external JSAny _getLocalToDevice();
List<dynamic> getLocalToDevice() => _getLocalToDevice().toObjectShallow as
List<dynamic>;
external JSVoid drawPicture(SkPicture picture);
@JS('drawParagraph')
external JSVoid _drawParagraph(
SkParagraph paragraph,
JSNumber x,
JSNumber y,
);
void drawParagraph(
SkParagraph paragraph,
double x,
double y,
) => _drawParagraph(paragraph, x.toJS, y.toJS);
}
@JS()
@anonymous
@staticInterop
class SkPicture {}
extension SkPictureExtension on SkPicture {
external JSVoid delete();
@JS('cullRect')
external JSFloat32Array _cullRect();
Float32List cullRect() => _cullRect().toDart;
@JS('approximateBytesUsed')
external JSNumber _approximateBytesUsed();
int approximateBytesUsed() => _approximateBytesUsed().toDartInt;
}
@JS()
@anonymous
@staticInterop
class SkParagraphBuilderNamespace {}
extension SkParagraphBuilderNamespaceExtension on SkParagraphBuilderNamespace {
external SkParagraphBuilder MakeFromFontCollection(
SkParagraphStyle paragraphStyle,
SkFontCollection? fontCollection,
);
bool RequiresClientICU() {
if (!js_util.hasProperty(this, 'RequiresClientICU')) {
return false;
}
return js_util.callMethod(this, 'RequiresClientICU', const <Object>[],) as bool;
}
}
@JS()
@anonymous
@staticInterop
class SkParagraphBuilder {}
extension SkParagraphBuilderExtension on SkParagraphBuilder {
@JS('addText')
external JSVoid _addText(JSString text);
void addText(String text) => _addText(text.toJS);
external JSVoid pushStyle(SkTextStyle textStyle);
external JSVoid pushPaintStyle(
SkTextStyle textStyle, SkPaint foreground, SkPaint background);
external JSVoid pop();
@JS('addPlaceholder')
external JSVoid _addPlaceholder(
JSNumber width,
JSNumber height,
SkPlaceholderAlignment alignment,
SkTextBaseline baseline,
JSNumber offset,
);
void addPlaceholder(
double width,
double height,
SkPlaceholderAlignment alignment,
SkTextBaseline baseline,
double offset,
) => _addPlaceholder(width.toJS, height.toJS, alignment,
baseline, offset.toJS);
@JS('getText')
external JSString _getTextUtf8();
String getTextUtf8() => _getTextUtf8().toDart;
// SkParagraphBuilder.getText() returns a UTF-8 encoded string; we need to
// decode it into a UTF-16 string.
String getText() => utf8.decode(getTextUtf8().codeUnits);
@JS('setWordsUtf8')
external JSVoid _setWordsUtf8(JSUint32Array words);
void setWordsUtf8(Uint32List words) => _setWordsUtf8(words.toJS);
@JS('setWordsUtf16')
external JSVoid _setWordsUtf16(JSUint32Array words);
void setWordsUtf16(Uint32List words) => _setWordsUtf16(words.toJS);
@JS('setGraphemeBreaksUtf8')
external JSVoid _setGraphemeBreaksUtf8(JSUint32Array graphemes);
void setGraphemeBreaksUtf8(Uint32List graphemes) =>
_setGraphemeBreaksUtf8(graphemes.toJS);
@JS('setGraphemeBreaksUtf16')
external JSVoid _setGraphemeBreaksUtf16(JSUint32Array graphemes);
void setGraphemeBreaksUtf16(Uint32List graphemes) =>
_setGraphemeBreaksUtf16(graphemes.toJS);
@JS('setLineBreaksUtf8')
external JSVoid _setLineBreaksUtf8(JSUint32Array lineBreaks);
void setLineBreaksUtf8(Uint32List lineBreaks) =>
_setLineBreaksUtf8(lineBreaks.toJS);
@JS('setLineBreaksUtf16')
external JSVoid _setLineBreaksUtf16(JSUint32Array lineBreaks);
void setLineBreaksUtf16(Uint32List lineBreaks) =>
_setLineBreaksUtf16(lineBreaks.toJS);
external SkParagraph build();
external JSVoid delete();
}
@JS()
@anonymous
@staticInterop
class SkParagraphStyle {}
@JS()
@anonymous
@staticInterop
class SkParagraphStyleProperties {
external factory SkParagraphStyleProperties();
}
extension SkParagraphStylePropertiesExtension on SkParagraphStyleProperties {
external set textAlign(SkTextAlign? value);
external set textDirection(SkTextDirection? value);
@JS('heightMultiplier')
external set _heightMultiplier(JSNumber? value);
set heightMultiplier(double? value) => _heightMultiplier = value?.toJS;
external set textHeightBehavior(SkTextHeightBehavior? value);
@JS('maxLines')
external set _maxLines(JSNumber? value);
set maxLines(int? value) => _maxLines = value?.toJS;
@JS('ellipsis')
external set _ellipsis(JSString? value);
set ellipsis(String? value) => _ellipsis = value?.toJS;
external set textStyle(SkTextStyleProperties? value);
external set strutStyle(SkStrutStyleProperties? strutStyle);
@JS('replaceTabCharacters')
external set _replaceTabCharacters(JSBoolean? bool);
set replaceTabCharacters(bool? bool) => _replaceTabCharacters = bool?.toJS;
external set applyRoundingHack(bool applyRoundingHack);
}
@JS()
@staticInterop
class SkTextStyle {}
@JS()
@staticInterop
class SkTextDecorationStyleEnum {}
extension SkTextDecorationStyleEnumExtension on SkTextDecorationStyleEnum {
external SkTextDecorationStyle get Solid;
external SkTextDecorationStyle get Double;
external SkTextDecorationStyle get Dotted;
external SkTextDecorationStyle get Dashed;
external SkTextDecorationStyle get Wavy;
}
@JS()
@staticInterop
class SkTextDecorationStyle {}
extension SkTextDecorationStyleExtension on SkTextDecorationStyle {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkTextDecorationStyle> _skTextDecorationStyles =
<SkTextDecorationStyle>[
canvasKit.DecorationStyle.Solid,
canvasKit.DecorationStyle.Double,
canvasKit.DecorationStyle.Dotted,
canvasKit.DecorationStyle.Dashed,
canvasKit.DecorationStyle.Wavy,
];
SkTextDecorationStyle toSkTextDecorationStyle(ui.TextDecorationStyle style) {
return _skTextDecorationStyles[style.index];
}
@JS()
@staticInterop
class SkTextBaselineEnum {}
extension SkTextBaselineEnumExtension on SkTextBaselineEnum {
external SkTextBaseline get Alphabetic;
external SkTextBaseline get Ideographic;
}
@JS()
@staticInterop
class SkTextBaseline {}
extension SkTextBaselineExtension on SkTextBaseline {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkTextBaseline> _skTextBaselines = <SkTextBaseline>[
canvasKit.TextBaseline.Alphabetic,
canvasKit.TextBaseline.Ideographic,
];
SkTextBaseline toSkTextBaseline(ui.TextBaseline baseline) {
return _skTextBaselines[baseline.index];
}
@JS()
@staticInterop
class SkPlaceholderAlignmentEnum {}
extension SkPlaceholderAlignmentEnumExtension on SkPlaceholderAlignmentEnum {
external SkPlaceholderAlignment get Baseline;
external SkPlaceholderAlignment get AboveBaseline;
external SkPlaceholderAlignment get BelowBaseline;
external SkPlaceholderAlignment get Top;
external SkPlaceholderAlignment get Bottom;
external SkPlaceholderAlignment get Middle;
}
@JS()
@staticInterop
class SkPlaceholderAlignment {}
extension SkPlaceholderAlignmentExtension on SkPlaceholderAlignment {
@JS('value')
external JSNumber get _value;
double get value => _value.toDartDouble;
}
final List<SkPlaceholderAlignment> _skPlaceholderAlignments =
<SkPlaceholderAlignment>[
canvasKit.PlaceholderAlignment.Baseline,
canvasKit.PlaceholderAlignment.AboveBaseline,
canvasKit.PlaceholderAlignment.BelowBaseline,
canvasKit.PlaceholderAlignment.Top,
canvasKit.PlaceholderAlignment.Bottom,
canvasKit.PlaceholderAlignment.Middle,
];
SkPlaceholderAlignment toSkPlaceholderAlignment(
ui.PlaceholderAlignment alignment) {
return _skPlaceholderAlignments[alignment.index];
}
@JS()
@anonymous
@staticInterop
class SkTextStyleProperties {
external factory SkTextStyleProperties();
}
extension SkTextStylePropertiesExtension on SkTextStyleProperties {
@JS('backgroundColor')
external set _backgroundColor(JSFloat32Array? value);
set backgroundColor(Float32List? value) =>
_backgroundColor = value?.toJS;
@JS('color')
external set _color(JSFloat32Array? value);
set color(Float32List? value) => _color = value?.toJS;
@JS('foregroundColor')
external set _foregroundColor(JSFloat32Array? value);
set foregroundColor(Float32List? value) => _foregroundColor = value?.toJS;
@JS('decoration')
external set _decoration(JSNumber? value);
set decoration(int? value) => _decoration = value?.toJS;
@JS('decorationThickness')
external set _decorationThickness(JSNumber? value);
set decorationThickness(double? value) =>
_decorationThickness = value?.toJS;
@JS('decorationColor')
external set _decorationColor(JSFloat32Array? value);
set decorationColor(Float32List? value) => _decorationColor = value?.toJS;
external set decorationStyle(SkTextDecorationStyle? value);
external set textBaseline(SkTextBaseline? value);
@JS('fontSize')
external set _fontSize(JSNumber? value);
set fontSize(double? value) => _fontSize = value?.toJS;
@JS('letterSpacing')
external set _letterSpacing(JSNumber? value);
set letterSpacing(double? value) => _letterSpacing = value?.toJS;
@JS('wordSpacing')
external set _wordSpacing(JSNumber? value);
set wordSpacing(double? value) => _wordSpacing = value?.toJS;
@JS('heightMultiplier')
external set _heightMultiplier(JSNumber? value);
set heightMultiplier(double? value) => _heightMultiplier = value?.toJS;
@JS('halfLeading')
external set _halfLeading(JSBoolean? value);
set halfLeading(bool? value) => _halfLeading = value?.toJS;
@JS('locale')
external set _locale(JSString? value);
set locale(String? value) => _locale = value?.toJS;
@JS('fontFamilies')
external set _fontFamilies(JSAny? value);
set fontFamilies(List<String>? value) => _fontFamilies = value?.toJSAnyShallow;
external set fontStyle(SkFontStyle? value);
@JS('shadows')
external set _shadows(JSArray<JSAny?>? value);
set shadows(List<SkTextShadow>? value) =>
// TODO(joshualitt): remove this cast when we reify JS types on JS
// backends.
// ignore: unnecessary_cast
_shadows = (value as List<JSAny>?)?.toJS;
@JS('fontFeatures')
external set _fontFeatures(JSArray<JSAny?>? value);
set fontFeatures(List<SkFontFeature>? value) =>
// TODO(joshualitt): remove this cast when we reify JS types on JS
// backends.
// ignore: unnecessary_cast
_fontFeatures = (value as List<JSAny>?)?.toJS;
@JS('fontVariations')
external set _fontVariations(JSArray<JSAny?>? value);
set fontVariations(List<SkFontVariation>? value) =>
// TODO(joshualitt): remove this cast when we reify JS types on JS
// backends.
// ignore: unnecessary_cast
_fontVariations = (value as List<JSAny>?)?.toJS;
}
@JS()
@anonymous
@staticInterop
class SkStrutStyleProperties {
external factory SkStrutStyleProperties();
}
extension SkStrutStylePropertiesExtension on SkStrutStyleProperties {
@JS('fontFamilies')
external set _fontFamilies(JSAny? value);
set fontFamilies(List<String>? value) =>
_fontFamilies = value?.toJSAnyShallow;
external set fontStyle(SkFontStyle? value);
@JS('fontSize')
external set _fontSize(JSNumber? value);
set fontSize(double? value) => _fontSize = value?.toJS;
@JS('heightMultiplier')
external set _heightMultiplier(JSNumber? value);
set heightMultiplier(double? value) => _heightMultiplier = value?.toJS;
@JS('halfLeading')
external set _halfLeading(JSBoolean? value);
set halfLeading(bool? value) => _halfLeading = value?.toJS;
@JS('leading')
external set _leading(JSNumber? value);
set leading(double? value) => _leading = value?.toJS;
@JS('strutEnabled')
external set _strutEnabled(JSBoolean? value);
set strutEnabled(bool? value) => _strutEnabled = value?.toJS;
@JS('forceStrutHeight')
external set _forceStrutHeight(JSBoolean? value);
set forceStrutHeight(bool? value) => _forceStrutHeight = value?.toJS;
}
@JS()
@anonymous
@staticInterop
class SkFontStyle {
external factory SkFontStyle();
}
extension SkFontStyleExtension on SkFontStyle {
external set weight(SkFontWeight? value);
external set slant(SkFontSlant? value);
}
@JS()
@anonymous
@staticInterop
class SkTextShadow {
external factory SkTextShadow();
}
extension SkTextShadowExtension on SkTextShadow {
@JS('color')
external set _color(JSFloat32Array? value);
set color(Float32List? value) => _color = value?.toJS;
@JS('offset')
external set _offset(JSFloat32Array? value);
set offset(Float32List? value) => _offset = value?.toJS;
@JS('blurRadius')
external set _blurRadius(JSNumber? value);
set blurRadius(double? value) => _blurRadius = value?.toJS;
}
@JS()
@anonymous
@staticInterop
class SkFontFeature {
external factory SkFontFeature();
}
extension SkFontFeatureExtension on SkFontFeature {
@JS('name')
external set _name(JSString? value);
set name(String? value) => _name = value?.toJS;
@JS('value')
external set _value(JSNumber? value);
set value(int? v) => _value = v?.toJS;
}
@JS()
@anonymous
@staticInterop
class SkFontVariation {
external factory SkFontVariation();
}
extension SkFontVariationExtension on SkFontVariation {
@JS('axis')
external set _axis(JSString? value);
set axis(String? value) => _axis = value?.toJS;
@JS('value')
external set _value(JSNumber? value);
set value(double? v) => _value = v?.toJS;
}
@JS()
@anonymous
@staticInterop
class SkTypeface {}
@JS('window.flutterCanvasKit.Font')
@staticInterop
class SkFont {
external factory SkFont(SkTypeface typeface);
}
extension SkFontExtension on SkFont {
@JS('getGlyphIDs')
external JSUint16Array _getGlyphIDs(JSString text);
Uint16List getGlyphIDs(String text) => _getGlyphIDs(text.toJS).toDart;
@JS('getGlyphBounds')
external JSVoid _getGlyphBounds(
JSAny glyphs, SkPaint? paint, JSUint8Array? output);
void getGlyphBounds(
List<int> glyphs, SkPaint? paint, Uint8List? output) =>
_getGlyphBounds(glyphs.toJSAnyShallow, paint, output?.toJS);
}
@JS()
@anonymous
@staticInterop
class SkFontMgr {}
extension SkFontMgrExtension on SkFontMgr {
@JS('getFamilyName')
external JSString? _getFamilyName(JSNumber fontId);
String? getFamilyName(double fontId) => _getFamilyName(fontId.toJS)?.toDart;
external JSVoid delete();
@JS('MakeTypefaceFromData')
external SkTypeface? _MakeTypefaceFromData(JSUint8Array font);
SkTypeface? MakeTypefaceFromData(Uint8List font) =>
_MakeTypefaceFromData(font.toJS);
}
@JS('window.flutterCanvasKit.TypefaceFontProvider')
@staticInterop
class TypefaceFontProvider extends SkFontMgr {
}
extension TypefaceFontProviderExtension on TypefaceFontProvider {
@JS('registerFont')
external JSVoid _registerFont(JSUint8Array font, JSString family);
void registerFont(Uint8List font, String family) =>
_registerFont(font.toJS, family.toJS);
}
@JS()
@anonymous
@staticInterop
class SkFontCollection {}
extension SkFontCollectionExtension on SkFontCollection {
external void enableFontFallback();
external void setDefaultFontManager(TypefaceFontProvider? fontManager);
external void delete();
}
@JS()
@anonymous
@staticInterop
class SkLineMetrics {}
extension SkLineMetricsExtension on SkLineMetrics {
@JS('startIndex')
external JSNumber get _startIndex;
double get startIndex => _startIndex.toDartDouble;
@JS('endIndex')
external JSNumber get _endIndex;
double get endIndex => _endIndex.toDartDouble;
@JS('endExcludingWhitespaces')
external JSNumber get _endExcludingWhitespaces;
double get endExcludingWhitespaces => _endExcludingWhitespaces.toDartDouble;
@JS('endIncludingNewline')
external JSNumber get _endIncludingNewline;
double get endIncludingNewline => _endIncludingNewline.toDartDouble;
@JS('isHardBreak')
external JSBoolean get _isHardBreak;
bool get isHardBreak => _isHardBreak.toDart;
@JS('ascent')
external JSNumber get _ascent;
double get ascent => _ascent.toDartDouble;
@JS('descent')
external JSNumber get _descent;
double get descent => _descent.toDartDouble;
@JS('height')
external JSNumber get _height;
double get height => _height.toDartDouble;
@JS('width')
external JSNumber get _width;
double get width => _width.toDartDouble;
@JS('left')
external JSNumber get _left;
double get left => _left.toDartDouble;
@JS('baseline')
external JSNumber get _baseline;
double get baseline => _baseline.toDartDouble;
@JS('lineNumber')
external JSNumber get _lineNumber;
double get lineNumber => _lineNumber.toDartDouble;
}
@JS()
@anonymous
@staticInterop
class SkGlyphClusterInfo {}
extension SkGlyphClusterInfoExtension on SkGlyphClusterInfo {
@JS('graphemeLayoutBounds')
external JSArray<JSAny?> get _bounds;
@JS('dir')
external SkTextDirection get _direction;
@JS('graphemeClusterTextRange')
external SkTextRange get _textRange;
ui.GlyphInfo get _glyphInfo {
final List<JSNumber> list = _bounds.toDart.cast<JSNumber>();
final ui.Rect bounds = ui.Rect.fromLTRB(list[0].toDartDouble, list[1].toDartDouble, list[2].toDartDouble, list[3].toDartDouble);
final ui.TextRange textRange = ui.TextRange(start: _textRange.start.toInt(), end: _textRange.end.toInt());
return ui.GlyphInfo(bounds, textRange, ui.TextDirection.values[_direction.value.toInt()]);
}
}
@JS()
@anonymous
@staticInterop
class SkRectWithDirection {}
extension SkRectWithDirectionExtension on SkRectWithDirection {
@JS('rect')
external JSFloat32Array get _rect;
Float32List get rect => _rect.toDart;
@JS('rect')
external set _rect(JSFloat32Array rect);
set rect(Float32List r) => _rect = r.toJS;
external SkTextDirection dir;
}
@JS()
@anonymous
@staticInterop
class SkParagraph {}
extension SkParagraphExtension on SkParagraph {
@JS('getAlphabeticBaseline')
external JSNumber _getAlphabeticBaseline();
double getAlphabeticBaseline() => _getAlphabeticBaseline().toDartDouble;
@JS('didExceedMaxLines')
external JSBoolean _didExceedMaxLines();
bool didExceedMaxLines() => _didExceedMaxLines().toDart;
@JS('getHeight')
external JSNumber _getHeight();
double getHeight() => _getHeight().toDartDouble;
@JS('getIdeographicBaseline')
external JSNumber _getIdeographicBaseline();
double getIdeographicBaseline() => _getIdeographicBaseline().toDartDouble;
@JS('getLineMetrics')
external JSArray<JSAny?> _getLineMetrics();
List<SkLineMetrics> getLineMetrics() =>
_getLineMetrics().toDart.cast<SkLineMetrics>();
@JS('getLineMetricsAt')
external SkLineMetrics? _getLineMetricsAt(JSNumber index);
SkLineMetrics? getLineMetricsAt(double index) => _getLineMetricsAt(index.toJS);
@JS('getNumberOfLines')
external JSNumber _getNumberOfLines();
double getNumberOfLines() => _getNumberOfLines().toDartDouble;
@JS('getLineNumberAt')
external JSNumber _getLineNumberAt(JSNumber index);
double getLineNumberAt(double index) => _getLineNumberAt(index.toJS).toDartDouble;
@JS('getLongestLine')
external JSNumber _getLongestLine();
double getLongestLine() => _getLongestLine().toDartDouble;
@JS('getMaxIntrinsicWidth')
external JSNumber _getMaxIntrinsicWidth();
double getMaxIntrinsicWidth() => _getMaxIntrinsicWidth().toDartDouble;
@JS('getMinIntrinsicWidth')
external JSNumber _getMinIntrinsicWidth();
double getMinIntrinsicWidth() => _getMinIntrinsicWidth().toDartDouble;
@JS('getMaxWidth')
external JSNumber _getMaxWidth();
double getMaxWidth() => _getMaxWidth().toDartDouble;
@JS('getRectsForRange')
external JSArray<JSAny?> _getRectsForRange(
JSNumber start,
JSNumber end,
SkRectHeightStyle heightStyle,
SkRectWidthStyle widthStyle,
);
List<SkRectWithDirection> getRectsForRange(
double start,
double end,
SkRectHeightStyle heightStyle,
SkRectWidthStyle widthStyle,
) => _getRectsForRange(start.toJS, end.toJS, heightStyle,
widthStyle).toDart.cast<SkRectWithDirection>();
@JS('getRectsForPlaceholders')
external JSArray<JSAny?> _getRectsForPlaceholders();
List<SkRectWithDirection> getRectsForPlaceholders() =>
_getRectsForPlaceholders().toDart.cast<SkRectWithDirection>();
@JS('getGlyphPositionAtCoordinate')
external SkTextPosition _getGlyphPositionAtCoordinate(
JSNumber x,
JSNumber y,
);
SkTextPosition getGlyphPositionAtCoordinate(
double x,
double y,
) => _getGlyphPositionAtCoordinate(x.toJS, y.toJS);
@JS('getGlyphInfoAt')
external SkGlyphClusterInfo? _getGlyphInfoAt(JSNumber position);
ui.GlyphInfo? getGlyphInfoAt(double position) => _getGlyphInfoAt(position.toJS)?._glyphInfo;
@JS('getClosestGlyphInfoAtCoordinate')
external SkGlyphClusterInfo? _getClosestGlyphInfoAtCoordinate(JSNumber x, JSNumber y);
ui.GlyphInfo? getClosestGlyphInfoAt(double x, double y) => _getClosestGlyphInfoAtCoordinate(x.toJS, y.toJS)?._glyphInfo;
@JS('getWordBoundary')
external SkTextRange _getWordBoundary(JSNumber position);
SkTextRange getWordBoundary(double position) =>
_getWordBoundary(position.toJS);
@JS('layout')
external JSVoid _layout(JSNumber width);
void layout(double width) => _layout(width.toJS);
external JSVoid delete();
}
@JS()
@staticInterop
class SkTextPosition {}
extension SkTextPositionExtension on SkTextPosition {
external SkAffinity get affinity;
@JS('pos')
external JSNumber get _pos;
double get pos => _pos.toDartDouble;
}
@JS()
@staticInterop
class SkTextRange {}
extension SkTextRangeExtension on SkTextRange {
@JS('start')
external JSNumber get _start;
double get start => _start.toDartDouble;
@JS('end')
external JSNumber get _end;
double get end => _end.toDartDouble;
}
@JS()
@anonymous
@staticInterop
class SkVertices {}
extension SkVerticesExtension on SkVertices {
external JSVoid delete();
}
@JS()
@anonymous
@staticInterop
class SkTonalColors {
factory SkTonalColors({
required Float32List ambient,
required Float32List spot,
}) => SkTonalColors._(ambient: ambient.toJS, spot: spot.toJS);
external factory SkTonalColors._({
required JSFloat32Array ambient,
required JSFloat32Array spot,
});
}
extension SkTonalColorsExtension on SkTonalColors {
@JS('ambient')
external JSFloat32Array get _ambient;
Float32List get ambient => _ambient.toDart;
@JS('spot')
external JSFloat32Array get _spot;
Float32List get spot => _spot.toDart;
}
@JS()
@staticInterop
class SkFontMgrNamespace {}
extension SkFontMgrNamespaceExtension on SkFontMgrNamespace {
// TODO(yjbanov): can this be made non-null? It returns null in our unit-tests right now.
@JS('FromData')
external SkFontMgr? _FromData(JSAny fonts);
SkFontMgr? FromData(List<Uint8List> fonts) => _FromData(fonts.toJSAnyShallow);
}
@JS()
@staticInterop
class TypefaceFontProviderNamespace {}
extension TypefaceFontProviderNamespaceExtension on TypefaceFontProviderNamespace {
external TypefaceFontProvider Make();
}
@JS()
@staticInterop
class FontCollectionNamespace {}
extension FontCollectionNamespaceExtension on FontCollectionNamespace {
external SkFontCollection Make();
}
@JS()
@anonymous
@staticInterop
class SkTypefaceFactory {}
extension SkTypefaceFactoryExtension on SkTypefaceFactory {
@JS('MakeFreeTypeFaceFromData')
external SkTypeface? _MakeFreeTypeFaceFromData(JSArrayBuffer fontData);
SkTypeface? MakeFreeTypeFaceFromData(ByteBuffer fontData) =>
_MakeFreeTypeFaceFromData(fontData.toJS);
}
/// Any Skia object that has a `delete` method.
@JS()
@anonymous
@staticInterop
class SkDeletable {}
extension SkDeletableExtension on SkDeletable {
/// Deletes the C++ side object.
external JSVoid delete();
/// Returns whether the corresponding C++ object has been deleted.
@JS('isDeleted')
external JSBoolean _isDeleted();
bool isDeleted() => _isDeleted().toDart;
/// Returns the JavaScript constructor for this object.
///
/// This is useful for debugging.
external JsConstructor get constructor;
}
@JS()
@anonymous
@staticInterop
class JsConstructor {}
extension JsConstructorExtension on JsConstructor {
/// The name of the "constructor", typically the function name called with
/// the `new` keyword, or the ES6 class name.
///
/// This is useful for debugging.
@JS('name')
external JSString get _name;
String get name => _name.toDart;
}
@JS()
@staticInterop
class SkData {}
extension SkDataExtension on SkData {
@JS('size')
external JSNumber _size();
double size() => _size().toDartDouble;
@JS('isEmpty')
external JSBoolean _isEmpty();
bool isEmpty() => _isEmpty().toDart;
@JS('bytes')
external JSUint8Array _bytes();
Uint8List bytes() => _bytes().toDart;
external JSVoid delete();
}
@JS()
@anonymous
@staticInterop
class SkImageInfo {
factory SkImageInfo({
required double width,
required double height,
required SkColorType colorType,
required SkAlphaType alphaType,
required ColorSpace colorSpace,
}) => SkImageInfo._(width: width.toJS,
height: height.toJS,
colorType: colorType,
alphaType: alphaType,
colorSpace: colorSpace);
external factory SkImageInfo._({
required JSNumber width,
required JSNumber height,
required SkColorType colorType,
required SkAlphaType alphaType,
required ColorSpace colorSpace,
});
}
extension SkImageInfoExtension on SkImageInfo {
external SkAlphaType get alphaType;
external ColorSpace get colorSpace;
external SkColorType get colorType;
@JS('height')
external JSNumber get _height;
double get height => _height.toDartDouble;
@JS('isEmpty')
external JSBoolean get _isEmpty;
bool get isEmpty => _isEmpty.toDart;
@JS('isOpaque')
external JSBoolean get _isOpaque;
bool get isOpaque => _isOpaque.toDart;
@JS('bounds')
external JSFloat32Array get _bounds;
Float32List get bounds => _bounds.toDart;
@JS('width')
external JSNumber get _width;
double get width => _width.toDartDouble;
external SkImageInfo makeAlphaType(SkAlphaType alphaType);
external SkImageInfo makeColorSpace(ColorSpace colorSpace);
external SkImageInfo makeColorType(SkColorType colorType);
@JS('makeWH')
external SkImageInfo _makeWH(JSNumber width, JSNumber height);
SkImageInfo makeWH(double width, double height) =>
_makeWH(width.toJS, height.toJS);
}
@JS()
@anonymous
@staticInterop
class SkPartialImageInfo {
factory SkPartialImageInfo({
required double width,
required double height,
required SkColorType colorType,
required SkAlphaType alphaType,
required ColorSpace colorSpace,
}) => SkPartialImageInfo._(width: width.toJS,
height: height.toJS,
colorType: colorType,
alphaType: alphaType,
colorSpace: colorSpace);
external factory SkPartialImageInfo._({
required JSNumber width,
required JSNumber height,
required SkColorType colorType,
required SkAlphaType alphaType,
required ColorSpace colorSpace,
});
}
extension SkPartialImageInfoExtension on SkPartialImageInfo {
external SkAlphaType get alphaType;
external ColorSpace get colorSpace;
external SkColorType get colorType;
@JS('height')
external JSNumber get _height;
double get height => _height.toDartDouble;
@JS('width')
external JSNumber get _width;
double get width => _width.toDartDouble;
}
@JS('window.flutterCanvasKit.RuntimeEffect')
@anonymous
@staticInterop
class SkRuntimeEffect {}
@JS('window.flutterCanvasKit.RuntimeEffect.Make')
external SkRuntimeEffect? _MakeRuntimeEffect(JSString program);
SkRuntimeEffect? MakeRuntimeEffect(String program) =>
_MakeRuntimeEffect(program.toJS);
extension SkSkRuntimeEffectExtension on SkRuntimeEffect {
@JS('makeShader')
external SkShader? _makeShader(JSAny uniforms);
SkShader? makeShader(List<Object> uniforms) =>
_makeShader(uniforms.toJSAnyShallow);
@JS('makeShaderWithChildren')
external SkShader? _makeShaderWithChildren(JSAny uniforms, JSAny children);
SkShader? makeShaderWithChildren(
List<Object> uniforms, List<Object?> children) =>
_makeShaderWithChildren(uniforms.toJSAnyShallow,
children.toJSAnyShallow);
}
const String _kFullCanvasKitJsFileName = 'canvaskit.js';
const String _kChromiumCanvasKitJsFileName = 'chromium/canvaskit.js';
String get _canvasKitBaseUrl => configuration.canvasKitBaseUrl;
@visibleForTesting
List<String> getCanvasKitJsFileNames(CanvasKitVariant variant) {
switch (variant) {
case CanvasKitVariant.auto:
return <String>[
if (_enableCanvasKitChromiumInAutoMode) _kChromiumCanvasKitJsFileName,
_kFullCanvasKitJsFileName,
];
case CanvasKitVariant.full:
return <String>[_kFullCanvasKitJsFileName];
case CanvasKitVariant.chromium:
return <String>[_kChromiumCanvasKitJsFileName];
}
}
Iterable<String> get _canvasKitJsUrls {
return getCanvasKitJsFileNames(configuration.canvasKitVariant).map(
(String filename) => '$_canvasKitBaseUrl$filename',
);
}
@visibleForTesting
String canvasKitWasmModuleUrl(String file, String canvasKitBase) =>
canvasKitBase + file;
/// Download and initialize the CanvasKit module.
///
/// Downloads the CanvasKit JavaScript, then calls `CanvasKitInit` to download
/// and initialize the CanvasKit wasm.
Future<CanvasKit> downloadCanvasKit() async {
await _downloadOneOf(_canvasKitJsUrls);
final CanvasKit canvasKit = await CanvasKitInit(CanvasKitInitOptions(
locateFile: createLocateFileCallback(canvasKitWasmModuleUrl),
));
if (canvasKit.ParagraphBuilder.RequiresClientICU() && !browserSupportsCanvaskitChromium) {
throw Exception(
'The CanvasKit variant you are using only works on Chromium browsers. '
'Please use a different CanvasKit variant, or use a Chromium browser.',
);
}
return canvasKit;
}
/// Finds the first URL in [urls] that can be downloaded successfully, and
/// downloads it.
///
/// If none of the URLs can be downloaded, throws an [Exception].
Future<void> _downloadOneOf(Iterable<String> urls) async {
for (final String url in urls) {
if (await _downloadCanvasKitJs(url)) {
return;
}
}
// Reaching this point means that all URLs failed to download.
throw Exception(
'Failed to download any of the following CanvasKit URLs: $urls',
);
}
/// Downloads the CanvasKit JavaScript file at [url].
///
/// Returns a [Future] that completes with `true` if the CanvasKit JavaScript
/// file was successfully downloaded, or `false` if it failed.
Future<bool> _downloadCanvasKitJs(String url) {
final DomHTMLScriptElement canvasKitScript =
createDomHTMLScriptElement(configuration.nonce);
canvasKitScript.src = createTrustedScriptUrl(url);
final Completer<bool> canvasKitLoadCompleter = Completer<bool>();
late final DomEventListener loadCallback;
late final DomEventListener errorCallback;
void loadEventHandler(DomEvent _) {
canvasKitScript.remove();
canvasKitLoadCompleter.complete(true);
}
void errorEventHandler(DomEvent errorEvent) {
canvasKitScript.remove();
canvasKitLoadCompleter.complete(false);
}
loadCallback = createDomEventListener(loadEventHandler);
errorCallback = createDomEventListener(errorEventHandler);
canvasKitScript.addEventListener('load', loadCallback);
canvasKitScript.addEventListener('error', errorCallback);
domDocument.head!.appendChild(canvasKitScript);
return canvasKitLoadCompleter.future;
}
// End of engine/lib/web_ui/lib/src/engine/canvaskit/canvaskit_api.dart.
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_interop';
import 'package:meta/meta.dart';
import 'package:ui/src/engine.dart';
/// Collects native objects that weren't explicitly disposed of using
/// [UniqueRef.dispose] or [CountedRef.unref].
///
/// We use this to delete Skia objects when their "Ck" wrapper is garbage
/// collected.
///
/// Example sequence of events:
///
/// 1. A (CkPaint, SkPaint) pair is created.
/// 2. The paint is used to paint some picture.
/// 3. CkPaint is dropped by the app.
/// 4. GC decides to perform a GC cycle and collects CkPaint.
/// 5. The finalizer function is called with the SkPaint as the sole argument.
/// 6. We call `delete` on SkPaint.
DomFinalizationRegistry _finalizationRegistry = DomFinalizationRegistry(
(JSBoxedDartObject boxedUniq) {
final UniqueRef<Object> uniq = boxedUniq.fromJSWrapper as UniqueRef<Object>;
uniq.collect();
}.toJS
);
NativeMemoryFinalizationRegistry nativeMemoryFinalizationRegistry = NativeMemoryFinalizationRegistry();
/// An indirection to [DomFinalizationRegistry] that enables tests to provide a
/// mock implementation of a finalization registry.
class NativeMemoryFinalizationRegistry {
void register(Object owner, UniqueRef<Object> ref) {
if (browserSupportsFinalizationRegistry) {
_finalizationRegistry.register(owner.toJSWrapper, ref.toJSWrapper);
}
}
}
/// Manages the lifecycle of a C++ object referenced by a single Dart object.
///
/// It is expected that when the C++ object is no longer needed [dispose] is
/// called.
///
/// To prevent memory leaks, the underlying C++ object is deleted by the GC if
/// it wasn't previously disposed of explicitly.
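///
/// A minimal usage sketch (`owner` and `skPaint` are assumed to already
/// exist; the debug label is arbitrary):
///
/// ```dart
/// final UniqueRef<SkPaint> ref = UniqueRef<SkPaint>(owner, skPaint, 'Paint');
/// final SkPaint paint = ref.nativeObject; // Borrow temporarily; do not store.
/// ref.dispose();                          // Deletes the underlying SkPaint.
/// ```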
class UniqueRef<T extends Object> {
UniqueRef(Object owner, T nativeObject, this._debugOwnerLabel) {
_nativeObject = nativeObject;
if (Instrumentation.enabled) {
Instrumentation.instance.incrementCounter('$_debugOwnerLabel Created');
}
nativeMemoryFinalizationRegistry.register(owner, this);
}
T? _nativeObject;
final String _debugOwnerLabel;
/// Returns the underlying native object reference, if it has not been
/// disposed of yet.
///
/// The returned reference must not be stored. It should only be borrowed
/// temporarily. Storing this reference may result in dangling pointer errors.
T get nativeObject {
assert(!isDisposed, 'The native object of $_debugOwnerLabel was disposed.');
return _nativeObject!;
}
/// Returns whether the underlying native object has been disposed and
/// therefore can no longer be used.
bool get isDisposed => _nativeObject == null;
/// Disposes the underlying native object.
///
/// The underlying object may be deleted or its ref count may be bumped down.
/// The exact action taken depends on the sharing model of that particular
/// object. For example, an [SkImage] may not be immediately deleted if a
/// [SkPicture] exists that still references it. On the other hand, [SkPaint]
/// is deleted eagerly.
void dispose() {
assert(!isDisposed, 'A native object reference cannot be disposed more than once.');
if (Instrumentation.enabled) {
Instrumentation.instance.incrementCounter('$_debugOwnerLabel Deleted');
}
final SkDeletable object = nativeObject as SkDeletable;
if (!object.isDeleted()) {
object.delete();
}
_nativeObject = null;
}
/// Called by the garbage [Collector] when the owner of this handle is
/// collected.
///
/// Garbage collection is used as a back-up for the cases when the handle
/// isn't disposed of explicitly by calling [dispose]. It most likely
/// indicates a memory leak or inefficiency in the framework or application
/// code.
@visibleForTesting
void collect() {
if (!isDisposed) {
if (Instrumentation.enabled) {
Instrumentation.instance.incrementCounter('$_debugOwnerLabel Leaked');
}
dispose();
}
}
}
/// Interface that classes wrapping [UniqueRef] must implement.
///
/// Used to collect stack traces in debug mode.
abstract class StackTraceDebugger {
/// The stack trace pointing to code location that created or upreffed a
/// [CountedRef].
StackTrace get debugStackTrace;
}
/// Manages the lifecycle of a C++ object referenced by multiple Dart objects.
///
/// Uses reference counting to manage the lifecycle of the C++ object.
///
/// If the C++ object has a unique owner, use [UniqueRef] instead.
///
/// The [ref] method can be used to increment the refcount to tell this box to
/// keep the underlying C++ object alive.
///
/// The [unref] method can be used to decrement the refcount indicating that a
/// referring object no longer needs it. When the refcount drops to zero the
/// underlying C++ object is deleted.
///
/// In addition to ref counting, this object is also managed by GC. When this
/// reference is garbage collected, the underlying C++ object is automatically
/// deleted. This is mostly done to prevent memory leaks in production.
/// Well-behaved framework and app code is expected to rely on [ref] and
/// [unref] for timely collection of resources.
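///
/// A minimal usage sketch (the wrapper and native types below are
/// illustrative only):
///
/// ```dart
/// final CountedRef<CkImage, SkImage> box =
///     CountedRef<CkImage, SkImage>(skImage, ckImage, 'SkImage');
/// box.ref(anotherCkImage);   // a second wrapper shares the native object
/// box.unref(ckImage);        // ref count drops back to 1
/// box.unref(anotherCkImage); // ref count drops to 0; SkImage is deleted
/// ```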
class CountedRef<R extends StackTraceDebugger, T extends Object> {
/// Creates a counted reference.
CountedRef(T nativeObject, R debugReferrer, String debugLabel) {
_ref = UniqueRef<T>(this, nativeObject, debugLabel);
assert(() {
debugReferrers.add(debugReferrer);
return true;
}());
assert(refCount == debugReferrers.length);
}
/// The native object reference whose lifecycle is being managed by this ref
/// count.
///
/// Do not store this value outside this class.
late final UniqueRef<T> _ref;
/// Returns the underlying native object reference, if it has not been
/// disposed of yet.
///
/// The returned reference must not be stored. It should only be borrowed
/// temporarily. Storing this reference may result in dangling pointer errors.
T get nativeObject => _ref.nativeObject;
/// The number of objects sharing references to this box.
///
/// When this count reaches zero, the underlying [nativeObject] is scheduled
/// for deletion.
int get refCount => _refCount;
int _refCount = 1;
/// Whether the underlying [nativeObject] has been disposed and is no longer
/// accessible.
bool get isDisposed => _ref.isDisposed;
/// When assertions are enabled, stores all objects that share this box.
///
/// The length of this list is always identical to [refCount].
///
/// This list can be used for debugging ref counting issues.
final Set<R> debugReferrers = <R>{};
/// If asserts are enabled, the [StackTrace]s representing when a reference
/// was created.
List<StackTrace> debugGetStackTraces() {
List<StackTrace>? result;
assert(() {
result = debugReferrers
.map<StackTrace>((R referrer) => referrer.debugStackTrace)
.toList();
return true;
}());
if (result != null) {
return result!;
}
throw UnsupportedError('debugGetStackTraces is only supported when asserts are enabled.');
}
/// Increases the reference count of this box because a new object began
/// sharing ownership of the underlying [nativeObject].
void ref(R debugReferrer) {
assert(
!_ref.isDisposed,
'Cannot increment ref count on a deleted handle.',
);
assert(_refCount > 0);
assert(
debugReferrers.add(debugReferrer),
'Attempted to increment ref count by the same referrer more than once.',
);
_refCount += 1;
assert(refCount == debugReferrers.length);
}
/// Decrements the reference count for the [nativeObject].
///
/// Does nothing if the object has already been deleted.
///
/// If this causes the reference count to drop to zero, deletes the
/// [nativeObject].
void unref(R debugReferrer) {
assert(
!_ref.isDisposed,
'Attempted to unref an already deleted native object.',
);
assert(
debugReferrers.remove(debugReferrer),
'Attempted to decrement ref count by the same referrer more than once.',
);
_refCount -= 1;
assert(refCount == debugReferrers.length);
if (_refCount == 0) {
_ref.dispose();
}
}
}
// End of file: engine/lib/web_ui/lib/src/engine/canvaskit/native_memory.dart
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_interop';
import 'dart:typed_data';
import '../dom.dart';
import '../text/line_breaker.dart';
import '../util.dart';
import 'canvaskit_api.dart';
typedef SegmentationResult = ({
Uint32List words,
Uint32List graphemes,
Uint32List breaks,
});
// The cache numbers below were picked based on the following logic.
//
// Most paragraphs in an app are small (e.g. icons, button labels, etc). These
// paragraphs are also cheap to cache. So we cache a lot of them. 100,000 of
// them amounts to a worst case of 5MB (10-character long text + words uint list
// + graphemes uint list + breaks uint list).
//
// Large paragraphs are less common (a handful per page), but are expensive to
// cache. So we cache fewer of them. 20 of them at a length of 50,000 characters
// amount to a memory usage of 5MB (50,000-character long text + words uint list
// + graphemes uint list + breaks uint list).
//
// Medium paragraphs are somewhere in between. 10,000 of them amount to a worst
// case of 5MB (100-character long text + words uint list + graphemes uint list
// + breaks uint list).
typedef SegmentationCacheSpec = ({int cacheSize, int maxTextLength});
const SegmentationCacheSpec kSmallParagraphCacheSpec = (cacheSize: 100000, maxTextLength: 10);
const SegmentationCacheSpec kMediumParagraphCacheSpec = (cacheSize: 10000, maxTextLength: 100);
const SegmentationCacheSpec kLargeParagraphCacheSpec = (cacheSize: 20, maxTextLength: 50000);
typedef SegmentationCache = ({
LruCache<String, SegmentationResult> small,
LruCache<String, SegmentationResult> medium,
LruCache<String, SegmentationResult> large,
});
/// Caches segmentation results for small, medium and large paragraphs.
///
/// Paragraphs are frequently re-created because of style or font changes, while
/// their text contents remain the same. This cache is effective at
/// short-circuiting the segmentation of such paragraphs.
final SegmentationCache segmentationCache = (
small: LruCache<String, SegmentationResult>(kSmallParagraphCacheSpec.cacheSize),
medium: LruCache<String, SegmentationResult>(kMediumParagraphCacheSpec.cacheSize),
large: LruCache<String, SegmentationResult>(kLargeParagraphCacheSpec.cacheSize),
);
extension SegmentationCacheExtensions on SegmentationCache {
/// Gets the appropriate cache for the given [text].
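///
/// For example, a 50-character string is served by the medium cache, while
/// text longer than 50,000 characters is not cached at all and null is
/// returned for it.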
LruCache<String, SegmentationResult>? getCacheForText(String text) {
if (text.length <= kSmallParagraphCacheSpec.maxTextLength) {
return small;
}
if (text.length <= kMediumParagraphCacheSpec.maxTextLength) {
return medium;
}
if (text.length <= kLargeParagraphCacheSpec.maxTextLength) {
return large;
}
return null;
}
/// Clears all the caches.
void clear() {
small.clear();
medium.clear();
large.clear();
}
}
/// Injects required ICU data into the [builder].
///
/// This should only be used with the CanvasKit Chromium variant that's compiled
/// without ICU data.
void injectClientICU(SkParagraphBuilder builder) {
assert(
canvasKit.ParagraphBuilder.RequiresClientICU(),
'This method should only be used with the CanvasKit Chromium variant.',
);
final SegmentationResult result = segmentText(builder.getText());
builder.setWordsUtf16(result.words);
builder.setGraphemeBreaksUtf16(result.graphemes);
builder.setLineBreaksUtf16(result.breaks);
}
/// Segments the [text] into words, graphemes and line breaks.
///
/// Caches results in [segmentationCache].
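///
/// A usage sketch (the exact boundary indices depend on the browser's
/// segmentation APIs):
///
/// ```dart
/// final SegmentationResult result = segmentText('Hello world');
/// // result.words, result.graphemes, and result.breaks are Uint32List
/// // code-unit boundary indices into the text.
/// ```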
SegmentationResult segmentText(String text) {
final LruCache<String, SegmentationResult>? cache = segmentationCache.getCacheForText(text);
final SegmentationResult? cachedResult = cache?[text];
final SegmentationResult result;
if (cachedResult != null) {
result = cachedResult;
} else {
result = (
words: fragmentUsingIntlSegmenter(text, IntlSegmenterGranularity.word),
graphemes: fragmentUsingIntlSegmenter(text, IntlSegmenterGranularity.grapheme),
breaks: fragmentUsingV8LineBreaker(text),
);
}
// Save or promote to most recently used.
cache?.cache(text, result);
return result;
}
/// The granularity at which to segment text.
///
/// To find all supported granularities, see:
/// - https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter/Segmenter
enum IntlSegmenterGranularity {
grapheme,
word,
}
final Map<IntlSegmenterGranularity, DomSegmenter> _intlSegmenters = <IntlSegmenterGranularity, DomSegmenter>{
IntlSegmenterGranularity.grapheme: createIntlSegmenter(granularity: 'grapheme'),
IntlSegmenterGranularity.word: createIntlSegmenter(granularity: 'word'),
};
Uint32List fragmentUsingIntlSegmenter(
String text,
IntlSegmenterGranularity granularity,
) {
final DomSegmenter segmenter = _intlSegmenters[granularity]!;
final DomIteratorWrapper<DomSegment> iterator = segmenter.segment(text).iterator();
final List<int> breaks = <int>[];
while (iterator.moveNext()) {
breaks.add(iterator.current.index);
}
breaks.add(text.length);
return Uint32List.fromList(breaks);
}
// These are the soft/hard line break values expected by Skia's SkParagraph.
const int _kSoftLineBreak = 0;
const int _kHardLineBreak = 1;
final DomV8BreakIterator _v8LineBreaker = createV8BreakIterator();
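/// Fragments [text] into line break opportunities using the V8 break iterator.
///
/// The returned list is a flat sequence of (offset, breakType) pairs: it
/// always starts with the pair (0, soft) and is followed by one pair per
/// fragment, where the offset is the fragment's end index and breakType is
/// [_kHardLineBreak] for mandatory breaks and [_kSoftLineBreak] otherwise.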
Uint32List fragmentUsingV8LineBreaker(String text) {
final List<LineBreakFragment> fragments =
breakLinesUsingV8BreakIterator(text, text.toJS, _v8LineBreaker);
final int size = (fragments.length + 1) * 2;
final Uint32List typedArray = Uint32List(size);
typedArray[0] = 0; // start index
typedArray[1] = _kSoftLineBreak; // break type
for (int i = 0; i < fragments.length; i++) {
final LineBreakFragment fragment = fragments[i];
final int uint32Index = 2 + i * 2;
typedArray[uint32Index] = fragment.end;
typedArray[uint32Index + 1] = fragment.type == LineBreakType.mandatory
? _kHardLineBreak
: _kSoftLineBreak;
}
return typedArray;
}
// End of file: engine/lib/web_ui/lib/src/engine/canvaskit/text_fragmenter.dart
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import '../browser_detection.dart';
import '../canvas_pool.dart';
import '../display.dart';
import '../dom.dart';
import '../engine_canvas.dart';
import '../frame_reference.dart';
import '../html_image_codec.dart';
import '../text/canvas_paragraph.dart';
import '../util.dart';
import '../vector_math.dart';
import 'clip.dart';
import 'color_filter.dart';
import 'dom_canvas.dart';
import 'painting.dart';
import 'path/path.dart';
import 'recording_canvas.dart';
import 'render_vertices.dart';
import 'shaders/image_shader.dart';
import 'shaders/shader.dart';
/// A raw HTML canvas that is directly written to.
class BitmapCanvas extends EngineCanvas {
/// Allocates a canvas with enough memory to paint a picture within the given
/// [bounds].
///
/// This canvas can be reused by pictures with different paint bounds as long
/// as the [Rect.size] of the bounds fully fits within the size used to
/// initialize this canvas.
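///
/// A minimal allocation/reuse sketch (the bounds and render strategy below
/// are illustrative only):
///
/// ```dart
/// final BitmapCanvas canvas =
///     BitmapCanvas(const ui.Rect.fromLTWH(0, 0, 100, 100), RenderStrategy());
/// // ... replay recorded paint commands into `canvas` ...
/// canvas.clear(); // prepare for reuse by a picture whose bounds fit
/// ```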
BitmapCanvas(this._bounds, RenderStrategy renderStrategy,
{double density = 1.0})
: _density = density,
_renderStrategy = renderStrategy,
widthInBitmapPixels = widthToPhysical(_bounds.width),
heightInBitmapPixels = heightToPhysical(_bounds.height),
_canvasPool = CanvasPool(widthToPhysical(_bounds.width),
heightToPhysical(_bounds.height), density) {
rootElement.style.position = 'absolute';
// Adds one extra pixel to the requested size. This is to compensate for
// _initializeViewport() snapping canvas position to 1 pixel, causing
// painting to overflow by at most 1 pixel.
_canvasPositionX = _bounds.left.floor() - kPaddingPixels;
_canvasPositionY = _bounds.top.floor() - kPaddingPixels;
_updateRootElementTransform();
_canvasPool.mount(rootElement as DomHTMLElement);
_setupInitialTransform();
}
/// Constructs bitmap canvas to capture image data.
factory BitmapCanvas.imageData(ui.Rect bounds) {
final BitmapCanvas bitmapCanvas = BitmapCanvas(bounds, RenderStrategy());
bitmapCanvas._preserveImageData = true;
return bitmapCanvas;
}
/// The rectangle positioned relative to the parent layer's coordinate
/// system's origin, within which this canvas paints.
///
/// Painting outside these bounds will result in cropping.
ui.Rect get bounds => _bounds;
set bounds(ui.Rect newValue) {
_bounds = newValue;
final int newCanvasPositionX = _bounds.left.floor() - kPaddingPixels;
final int newCanvasPositionY = _bounds.top.floor() - kPaddingPixels;
if (_canvasPositionX != newCanvasPositionX ||
_canvasPositionY != newCanvasPositionY) {
_canvasPositionX = newCanvasPositionX;
_canvasPositionY = newCanvasPositionY;
_updateRootElementTransform();
}
}
ui.Rect _bounds;
CrossFrameCache<DomHTMLElement>? _elementCache;
/// The amount of padding to add around the edges of this canvas to
/// ensure that anti-aliased arcs are not clipped.
static const int kPaddingPixels = 1;
@override
final DomElement rootElement = createDomElement('flt-canvas');
final CanvasPool _canvasPool;
/// The size of the paint [bounds].
ui.Size get size => _bounds.size;
/// The last CSS font string is cached to optimize the case where the font
/// style hasn't changed.
String? _cachedLastCssFont;
/// List of extra sibling elements created for paragraphs and clipping.
final List<DomElement> _children = <DomElement>[];
/// The number of pixels along the width of the bitmap that the canvas element
/// renders into.
///
/// These pixels are different from the logical CSS pixels. Here a pixel
/// literally means 1 point with a RGBA color.
final int widthInBitmapPixels;
/// The number of pixels along the height of the bitmap that the canvas element
/// renders into.
///
/// These pixels are different from the logical CSS pixels. Here a pixel
/// literally means 1 point with a RGBA color.
final int heightInBitmapPixels;
/// The number of pixels in the bitmap that the canvas element renders into.
///
/// These pixels are different from the logical CSS pixels. Here a pixel
/// literally means 1 point with a RGBA color.
int get bitmapPixelCount => widthInBitmapPixels * heightInBitmapPixels;
int _saveCount = 0;
/// Keeps track of what device pixel ratio was used when this [BitmapCanvas]
/// was created.
final double _devicePixelRatio =
EngineFlutterDisplay.instance.browserDevicePixelRatio;
// Compensation for [_initializeViewport] snapping canvas position to 1 pixel.
int? _canvasPositionX, _canvasPositionY;
// Indicates to the instructions following drawImage or drawParagraph that
// a child element was created to paint into.
// TODO(yjbanov): When childElements are created by
// drawImage/drawParagraph commands, compositing order is not correctly
// handled when we interleave these with other paint commands.
// To solve this, recording canvas will have to check the paint queue
// and send a hint to EngineCanvas that additional canvas layers need
// to be used to composite correctly. In practice this is very rare
// with Widgets but CustomPainter(s) can hit this code path.
bool _childOverdraw = false;
/// Forces text to be drawn using HTML rather than bitmap.
///
/// Use this for tests only.
set debugChildOverdraw(bool value) {
_childOverdraw = value;
}
/// Indicates that the bitmap canvas contains a 3d transform.
/// WebKit fails to preserve paint order when this happens and therefore
/// requires inserting a <div style="transform: translate3d(0,0,0);"> wrapper
/// for each child to force the correct rendering order.
bool _contains3dTransform = false;
/// Indicates that contents should be rendered into canvas so a dataUrl
/// can be constructed from contents.
bool _preserveImageData = false;
/// Canvas pixel to screen pixel ratio. Similar to DPI, but
/// uses the global transform of the canvas to compute the ratio.
double get density => _density;
final double _density;
final RenderStrategy _renderStrategy;
/// Sets up a cache for reusing DOM elements across frames.
void setElementCache(CrossFrameCache<DomHTMLElement>? cache) {
_elementCache = cache;
}
void _updateRootElementTransform() {
// Flutter emits paint operations positioned relative to the parent layer's
// coordinate system. However, canvas' coordinate system's origin is always
// in the top-left corner of the canvas. We therefore need to inject an
// initial translation so the paint operations are positioned as expected.
//
// The flooring of the value is to ensure that canvas' top-left corner
// lands on the physical pixel.
// TODO(yjbanov): This is not accurate if there are
// transforms higher up in the stack.
rootElement.style.transform =
'translate(${_canvasPositionX}px, ${_canvasPositionY}px)';
}
void _setupInitialTransform() {
final double canvasPositionCorrectionX = _bounds.left -
BitmapCanvas.kPaddingPixels -
_canvasPositionX!.toDouble();
final double canvasPositionCorrectionY = _bounds.top -
BitmapCanvas.kPaddingPixels -
_canvasPositionY!.toDouble();
// This compensates for the translate on the `rootElement`.
_canvasPool.initialTransform = ui.Offset(
-_bounds.left + canvasPositionCorrectionX + BitmapCanvas.kPaddingPixels,
-_bounds.top + canvasPositionCorrectionY + BitmapCanvas.kPaddingPixels,
);
}
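// Converts a logical width/height to the number of physical bitmap pixels to
// allocate. For example, with a browser device pixel ratio of 2.0, a logical
// width of 100.0 becomes ((100.0 + 1) * 2.0).ceil() + 2 * kPaddingPixels =
// 202 + 2 = 204 physical pixels.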
static int widthToPhysical(double width) {
final double boundsWidth = width + 1;
return (boundsWidth * EngineFlutterDisplay.instance.browserDevicePixelRatio)
.ceil() +
2 * kPaddingPixels;
}
static int heightToPhysical(double height) {
final double boundsHeight = height + 1;
return (boundsHeight * EngineFlutterDisplay.instance.browserDevicePixelRatio)
.ceil() +
2 * kPaddingPixels;
}
// Used by picture to assess if canvas is large enough to reuse as is.
bool doesFitBounds(ui.Rect newBounds, double newDensity) {
return widthInBitmapPixels >= widthToPhysical(newBounds.width) &&
heightInBitmapPixels >= heightToPhysical(newBounds.height) &&
_density == newDensity;
}
@override
void dispose() {
_canvasPool.dispose();
}
/// Prepare to reuse this canvas by clearing its current contents.
@override
void clear() {
_contains3dTransform = false;
_canvasPool.clear();
final int len = _children.length;
for (int i = 0; i < len; i++) {
final DomElement child = _children[i];
// Don't remove children that have been reused by CrossFrameCache.
if (child.parentNode == rootElement) {
child.remove();
}
}
_children.clear();
_childOverdraw = false;
_cachedLastCssFont = null;
_setupInitialTransform();
}
/// Checks whether this [BitmapCanvas] can still be recycled and reused.
///
/// See also:
///
/// * [PersistedPicture._applyBitmapPaint] which uses this method to
/// decide whether to reuse this canvas or not.
/// * [PersistedPicture._recycleCanvas] which also uses this method
/// for the same reason.
bool isReusable() {
return _devicePixelRatio == EngineFlutterDisplay.instance.browserDevicePixelRatio;
}
/// Returns a "data://" URI containing a representation of the image in this
/// canvas in PNG format.
String toDataUrl() {
return _canvasPool.toDataUrl();
}
/// Sets the global paint styles to correspond to [paint].
void setUpPaint(SurfacePaintData paint, ui.Rect? shaderBounds) {
_canvasPool.contextHandle.setUpPaint(paint, shaderBounds);
}
void tearDownPaint() {
_canvasPool.contextHandle.tearDownPaint();
}
@override
int save() {
_canvasPool.save();
return _saveCount++;
}
void saveLayer(ui.Rect bounds, ui.Paint paint) {
save();
}
@override
void restore() {
_canvasPool.restore();
_saveCount--;
_cachedLastCssFont = null;
}
// TODO(yjbanov): not sure what this is attempting to do, but it is probably
// wrong because some clips and transforms are expressed using
// HTML DOM elements.
void restoreToCount(int count) {
assert(_saveCount >= count);
final int restores = _saveCount - count;
for (int i = 0; i < restores; i++) {
_canvasPool.restore();
}
_saveCount = count;
}
@override
void translate(double dx, double dy) {
_canvasPool.translate(dx, dy);
}
@override
void scale(double sx, double sy) {
_canvasPool.scale(sx, sy);
}
@override
void rotate(double radians) {
_canvasPool.rotate(radians);
}
@override
void skew(double sx, double sy) {
_canvasPool.skew(sx, sy);
}
@override
void transform(Float32List matrix4) {
final TransformKind transformKind = transformKindOf(matrix4);
if (transformKind == TransformKind.complex) {
_contains3dTransform = true;
}
_canvasPool.transform(matrix4);
}
@override
void clipRect(ui.Rect rect, ui.ClipOp clipOp) {
if (clipOp == ui.ClipOp.difference) {
// Create 2 rectangles inside each other that represent the
// clip area difference using the even-odd fill rule.
final SurfacePath path = SurfacePath();
path.fillType = ui.PathFillType.evenOdd;
path.addRect(ui.Rect.fromLTWH(0, 0, _bounds.width, _bounds.height));
path.addRect(rect);
_canvasPool.clipPath(path);
} else {
_canvasPool.clipRect(rect);
}
}
@override
void clipRRect(ui.RRect rrect) {
_canvasPool.clipRRect(rrect);
}
@override
void clipPath(ui.Path path) {
_canvasPool.clipPath(path);
}
/// Whether the drawing operation should use a DOM node instead of canvas.
///
/// - Perspective transforms are not supported by canvas and require
///   DOM to render correctly.
/// - Pictures typically have large rects/rounded rectangles as background;
///   prefer DOM if a canvas has not been allocated yet.
///
bool _useDomForRenderingFill(SurfacePaintData paint) {
if (_preserveImageData) {
return false;
}
return _renderStrategy.isInsideSvgFilterTree ||
_contains3dTransform ||
(_childOverdraw &&
!_canvasPool.hasCanvas &&
paint.maskFilter == null &&
paint.shader == null &&
paint.style != ui.PaintingStyle.stroke);
}
/// Same as [_useDomForRenderingFill] but allows stroke as well.
///
/// DOM canvas is generated for simple strokes using borders.
bool _useDomForRenderingFillAndStroke(SurfacePaintData paint) {
if (_preserveImageData) {
return false;
}
return _renderStrategy.isInsideSvgFilterTree ||
_contains3dTransform ||
((_childOverdraw ||
_renderStrategy.hasImageElements ||
_renderStrategy.hasParagraphs) &&
!_canvasPool.hasCanvas &&
paint.maskFilter == null &&
paint.shader == null);
}
@override
void drawColor(ui.Color color, ui.BlendMode blendMode) {
final SurfacePaintData paintData = SurfacePaintData()
..color = color.value
..blendMode = blendMode;
if (_useDomForRenderingFill(paintData)) {
drawRect(_computeScreenBounds(_canvasPool.currentTransform), paintData);
} else {
_canvasPool.drawColor(color, blendMode);
}
}
@override
void drawLine(ui.Offset p1, ui.Offset p2, SurfacePaintData paint) {
if (_useDomForRenderingFill(paint)) {
final SurfacePath path = SurfacePath()
..moveTo(p1.dx, p1.dy)
..lineTo(p2.dx, p2.dy);
drawPath(path, paint);
} else {
final ui.Rect? shaderBounds =
(paint.shader != null) ? ui.Rect.fromPoints(p1, p2) : null;
setUpPaint(paint, shaderBounds);
_canvasPool.strokeLine(p1, p2);
tearDownPaint();
}
}
@override
void drawPaint(SurfacePaintData paint) {
if (_useDomForRenderingFill(paint)) {
drawRect(_computeScreenBounds(_canvasPool.currentTransform), paint);
} else {
final ui.Rect? shaderBounds =
(paint.shader != null) ? _computePictureBounds() : null;
setUpPaint(paint, shaderBounds);
_canvasPool.fill();
tearDownPaint();
}
}
@override
void drawRect(ui.Rect rect, SurfacePaintData paint) {
if (_useDomForRenderingFillAndStroke(paint)) {
rect = adjustRectForDom(rect, paint);
final DomHTMLElement element = buildDrawRectElement(
rect, paint, 'draw-rect', _canvasPool.currentTransform);
_drawElement(element, rect.topLeft, paint);
} else {
setUpPaint(paint, rect);
_canvasPool.drawRect(rect, paint.style);
tearDownPaint();
}
}
/// Inserts a DOM element at [offset], creating a stack of divs for clipping
/// if required.
void _drawElement(
DomElement element, ui.Offset offset, SurfacePaintData paint) {
if (_canvasPool.isClipped) {
final List<DomElement> clipElements = _clipContent(
_canvasPool.clipStack!,
element,
ui.Offset.zero,
transformWithOffset(_canvasPool.currentTransform, offset));
for (final DomElement clipElement in clipElements) {
rootElement.append(clipElement);
_children.add(clipElement);
}
} else {
rootElement.append(element);
_children.add(element);
}
final ui.BlendMode? blendMode = paint.blendMode;
if (blendMode != null) {
element.style.mixBlendMode = blendModeToCssMixBlendMode(blendMode) ?? '';
}
// Switch to preferring DOM from now on, and close the current canvas.
_closeCanvas();
}
@override
void drawRRect(ui.RRect rrect, SurfacePaintData paint) {
if (_useDomForRenderingFillAndStroke(paint)) {
final ui.Rect rect = adjustRectForDom(rrect.outerRect, paint);
final DomHTMLElement element = buildDrawRectElement(
rect, paint, 'draw-rrect', _canvasPool.currentTransform);
applyRRectBorderRadius(element.style, rrect);
_drawElement(element, rect.topLeft, paint);
} else {
setUpPaint(paint, rrect.outerRect);
_canvasPool.drawRRect(rrect, paint.style);
tearDownPaint();
}
}
@override
void drawDRRect(ui.RRect outer, ui.RRect inner, SurfacePaintData paint) {
setUpPaint(paint, outer.outerRect);
_canvasPool.drawDRRect(outer, inner, paint.style);
tearDownPaint();
}
@override
void drawOval(ui.Rect rect, SurfacePaintData paint) {
if (_useDomForRenderingFill(paint)) {
rect = adjustRectForDom(rect, paint);
final DomHTMLElement element = buildDrawRectElement(
rect, paint, 'draw-oval', _canvasPool.currentTransform);
_drawElement(element, rect.topLeft, paint);
element.style.borderRadius =
'${rect.width / 2.0}px / ${rect.height / 2.0}px';
} else {
setUpPaint(paint, rect);
_canvasPool.drawOval(rect, paint.style);
tearDownPaint();
}
}
@override
void drawCircle(ui.Offset c, double radius, SurfacePaintData paint) {
if (_useDomForRenderingFillAndStroke(paint)) {
final ui.Rect rect = adjustRectForDom(ui.Rect.fromCircle(center: c, radius: radius), paint);
final DomHTMLElement element = buildDrawRectElement(
rect, paint, 'draw-circle', _canvasPool.currentTransform);
_drawElement(element, rect.topLeft, paint);
element.style.borderRadius = '50%';
} else {
setUpPaint(
paint,
paint.shader != null
? ui.Rect.fromCircle(center: c, radius: radius)
: null);
_canvasPool.drawCircle(c, radius, paint.style);
tearDownPaint();
}
}
@override
void drawPath(ui.Path path, SurfacePaintData paint) {
if (_useDomForRenderingFill(paint)) {
final Matrix4 transform = _canvasPool.currentTransform;
final SurfacePath surfacePath = path as SurfacePath;
final ui.Rect? pathAsRect = surfacePath.toRect();
if (pathAsRect != null) {
drawRect(pathAsRect, paint);
return;
}
final ui.RRect? pathAsRRect = surfacePath.toRoundedRect();
if (pathAsRRect != null) {
drawRRect(pathAsRRect, paint);
return;
}
final DomElement svgElm = pathToSvgElement(surfacePath, paint);
if (!_canvasPool.isClipped) {
final DomCSSStyleDeclaration style = svgElm.style;
style.position = 'absolute';
if (!transform.isIdentity()) {
style
..transform = matrix4ToCssTransform(transform)
..transformOrigin = '0 0 0';
}
}
_applyFilter(svgElm, paint);
_drawElement(svgElm, ui.Offset.zero, paint);
} else {
setUpPaint(paint, paint.shader != null ? path.getBounds() : null);
if (paint.style == null && paint.strokeWidth != null) {
_canvasPool.drawPath(path, ui.PaintingStyle.stroke);
} else {
_canvasPool.drawPath(path, paint.style);
}
tearDownPaint();
}
}
void _applyFilter(DomElement element, SurfacePaintData paint) {
if (paint.maskFilter != null) {
final bool isStroke = paint.style == ui.PaintingStyle.stroke;
final String cssColor = colorValueToCssString(paint.color);
final double sigma = paint.maskFilter!.webOnlySigma;
if (browserEngine == BrowserEngine.webkit && !isStroke) {
// A bug in webkit leaves artifacts when this element is animated
// with filter: blur, we use boxShadow instead.
element.style.boxShadow = '0px 0px ${sigma * 2.0}px $cssColor';
} else {
element.style.filter = 'blur(${sigma}px)';
}
}
}
@override
void drawShadow(ui.Path path, ui.Color color, double elevation,
bool transparentOccluder) {
_canvasPool.drawShadow(path, color, elevation, transparentOccluder);
}
@override
void drawImage(ui.Image image, ui.Offset p, SurfacePaintData paint) {
final DomHTMLElement imageElement = _drawImage(image, p, paint);
if (paint.colorFilter != null) {
_applyTargetSize(
imageElement, image.width.toDouble(), image.height.toDouble());
}
if (!_preserveImageData) {
_closeCanvas();
}
}
DomHTMLImageElement _reuseOrCreateImage(HtmlImage htmlImage) {
final String cacheKey = htmlImage.imgElement.src!;
if (_elementCache != null) {
final DomHTMLImageElement? imageElement =
_elementCache!.reuse(cacheKey) as DomHTMLImageElement?;
if (imageElement != null) {
return imageElement;
}
}
// Can't reuse, create new instance.
final DomHTMLImageElement newImageElement = htmlImage.cloneImageElement();
if (_elementCache != null) {
_elementCache!.cache(cacheKey, newImageElement, _onEvictElement);
}
return newImageElement;
}
static void _onEvictElement(DomHTMLElement element) {
element.remove();
}
DomHTMLElement _drawImage(
ui.Image image, ui.Offset p, SurfacePaintData paint) {
final HtmlImage htmlImage = image as HtmlImage;
final ui.BlendMode? blendMode = paint.blendMode;
final EngineHtmlColorFilter? colorFilter = createHtmlColorFilter(paint.colorFilter);
DomHTMLElement imgElement;
if (colorFilter is ModeHtmlColorFilter) {
imgElement = _createImageElementWithBlend(
image, colorFilter.color, colorFilter.blendMode, paint);
} else if (colorFilter is MatrixHtmlColorFilter) {
imgElement = _createImageElementWithSvgColorMatrixFilter(
image, colorFilter.matrix, paint);
} else {
// No Blending, create an image by cloning original loaded image.
imgElement = _reuseOrCreateImage(htmlImage);
}
imgElement.style.mixBlendMode = blendModeToCssMixBlendMode(blendMode) ?? '';
if (_preserveImageData && imgElement is DomHTMLImageElement) {
// If we're preserving image data, we have to actually draw the image
// element onto the canvas.
// TODO(jacksongardner): Make this actually work with color filters.
setUpPaint(paint, null);
_canvasPool.drawImage(imgElement, p);
tearDownPaint();
} else {
if (_canvasPool.isClipped) {
// Reset width/height since they may have been previously set.
imgElement.style
..removeProperty('width')
..removeProperty('height');
final List<DomElement> clipElements = _clipContent(
_canvasPool.clipStack!,
imgElement,
p,
_canvasPool.currentTransform);
for (final DomElement clipElement in clipElements) {
rootElement.append(clipElement);
_children.add(clipElement);
}
} else {
final String cssTransform = float64ListToCssTransform(
transformWithOffset(_canvasPool.currentTransform, p).storage);
imgElement.style
..transformOrigin = '0 0 0'
..transform = cssTransform
// Reset width/height since they may have been previously set.
..removeProperty('width')
..removeProperty('height');
rootElement.append(imgElement);
_children.add(imgElement);
}
}
return imgElement;
}
DomHTMLElement _createImageElementWithBlend(HtmlImage image, ui.Color color,
ui.BlendMode blendMode, SurfacePaintData paint) {
switch (blendMode) {
case ui.BlendMode.colorBurn:
case ui.BlendMode.colorDodge:
case ui.BlendMode.hue:
case ui.BlendMode.modulate:
case ui.BlendMode.overlay:
case ui.BlendMode.plus:
case ui.BlendMode.srcIn:
case ui.BlendMode.srcATop:
case ui.BlendMode.srcOut:
case ui.BlendMode.saturation:
case ui.BlendMode.color:
case ui.BlendMode.luminosity:
case ui.BlendMode.xor:
case ui.BlendMode.dstATop:
return _createImageElementWithSvgBlendFilter(
image, color, blendMode, paint);
default:
return _createBackgroundImageWithBlend(image, color, blendMode, paint);
}
}
@override
void drawImageRect(
ui.Image image, ui.Rect src, ui.Rect dst, SurfacePaintData paint) {
final bool requiresClipping = src.left != 0 ||
src.top != 0 ||
src.width != image.width ||
src.height != image.height;
// If source and destination sizes are identical, we can skip the longer
// code path that sets the size of the element and clips.
//
// If there is a color filter set, however, we may be using background-image
// to render, therefore we have to explicitly set the width/height of the
// element for blending to work with background-color.
if (dst.width == image.width &&
dst.height == image.height &&
!requiresClipping &&
paint.colorFilter == null) {
_drawImage(image, dst.topLeft, paint);
} else {
if (requiresClipping) {
save();
clipRect(dst, ui.ClipOp.intersect);
}
double targetLeft = dst.left;
double targetTop = dst.top;
if (requiresClipping) {
if (src.width != image.width) {
final double leftMargin = -src.left * (dst.width / src.width);
targetLeft += leftMargin;
}
if (src.height != image.height) {
final double topMargin = -src.top * (dst.height / src.height);
targetTop += topMargin;
}
}
final DomElement imgElement =
_drawImage(image, ui.Offset(targetLeft, targetTop), paint);
// To scale set width / height on destination image.
// For clipping we need to scale according to
// clipped-width/full image width and shift it according to left/top of
// source rectangle.
double targetWidth = dst.width;
double targetHeight = dst.height;
if (requiresClipping) {
targetWidth *= image.width / src.width;
targetHeight *= image.height / src.height;
}
_applyTargetSize(
imgElement as DomHTMLElement, targetWidth, targetHeight);
if (requiresClipping) {
restore();
}
}
_closeCanvas();
}
void _applyTargetSize(
DomHTMLElement imageElement, double targetWidth, double targetHeight) {
final DomCSSStyleDeclaration imageStyle = imageElement.style;
final String widthPx = '${targetWidth.toStringAsFixed(2)}px';
final String heightPx = '${targetHeight.toStringAsFixed(2)}px';
imageStyle
// left,top are set to 0 (although position is absolute) because
// Chrome will glitch if you leave them out, reproducible with
// canvas_image_blend_test on row 6, MacOS / Chrome 81.04.
..left = '0px'
..top = '0px'
..width = widthPx
..height = heightPx;
if (!domInstanceOfString(imageElement, 'HTMLImageElement')) {
imageElement.style.backgroundSize = '$widthPx $heightPx';
}
}
// Creates a Div element to render an image using background-image css
// attribute to be able to use background blend mode(s) when possible.
//
// Example: <div style="
// position:absolute;
// background-image:url(....);
// background-blend-mode:"darken"
// background-color: #RRGGBB">
//
// Special cases:
// For clear, dstOut it generates a blank element.
// For src, srcOver it only sets the background-color attribute.
// For dst, dstIn it only sets the source image, not the background color.
DomHTMLElement _createBackgroundImageWithBlend(
HtmlImage image,
ui.Color? filterColor,
ui.BlendMode colorFilterBlendMode,
SurfacePaintData paint) {
// When blending with color we can't use an image element.
// Instead use a div element with background image, color and
// background blend mode.
final DomHTMLElement imgElement = createDomHTMLDivElement();
final DomCSSStyleDeclaration style = imgElement.style;
switch (colorFilterBlendMode) {
case ui.BlendMode.clear:
case ui.BlendMode.dstOut:
style.position = 'absolute';
case ui.BlendMode.src:
case ui.BlendMode.srcOver:
style
..position = 'absolute'
..backgroundColor = filterColor!.toCssString();
case ui.BlendMode.dst:
case ui.BlendMode.dstIn:
style
..position = 'absolute'
..backgroundImage = "url('${image.imgElement.src}')";
default:
style
..position = 'absolute'
..backgroundImage = "url('${image.imgElement.src}')"
..backgroundBlendMode =
blendModeToCssMixBlendMode(colorFilterBlendMode) ?? ''
..backgroundColor = filterColor!.toCssString();
break;
}
return imgElement;
}
// Creates an image element and an svg filter to apply on the element.
DomHTMLElement _createImageElementWithSvgBlendFilter(
HtmlImage image,
ui.Color? filterColor,
ui.BlendMode colorFilterBlendMode,
SurfacePaintData paint) {
// Use an SVG filter to apply the blend mode to the image element.
final SvgFilter svgFilter = svgFilterFromBlendMode(filterColor, colorFilterBlendMode);
rootElement.append(svgFilter.element);
_children.add(svgFilter.element);
final DomHTMLElement imgElement = _reuseOrCreateImage(image);
imgElement.style.filter = 'url(#${svgFilter.id})';
if (colorFilterBlendMode == ui.BlendMode.saturation) {
imgElement.style.backgroundColor = filterColor!.toCssString();
}
return imgElement;
}
// Creates an image element and an svg color matrix filter to apply on the element.
DomHTMLElement _createImageElementWithSvgColorMatrixFilter(
HtmlImage image, List<double> matrix, SurfacePaintData paint) {
// Use an SVG color matrix filter to apply the matrix to the image element.
final SvgFilter svgFilter = svgFilterFromColorMatrix(matrix);
rootElement.append(svgFilter.element);
_children.add(svgFilter.element);
final DomHTMLElement imgElement = _reuseOrCreateImage(image);
imgElement.style.filter = 'url(#${svgFilter.id})';
return imgElement;
}
// Should be called when we add new html elements into rootElement so that
// paint order is preserved.
//
// For example if we draw a path and then a paragraph and image:
// - rootElement
// |--- <canvas>
// |--- <p>
// |--- <img>
// Any drawing operations after these tags should allocate a new canvas,
// instead of drawing into the earlier canvas.
void _closeCanvas() {
_canvasPool.closeCanvas();
_childOverdraw = true;
_cachedLastCssFont = null;
}
void setCssFont(String cssFont, ui.TextDirection textDirection) {
final DomCanvasRenderingContext2D ctx = _canvasPool.context;
ctx.direction = textDirection == ui.TextDirection.ltr ? 'ltr' : 'rtl';
if (cssFont != _cachedLastCssFont) {
ctx.font = cssFont;
_cachedLastCssFont = cssFont;
}
}
/// Measures the given [text] and returns a [DomTextMetrics] object that
/// contains information about the measurement.
///
/// The text is measured using the font set by the most recent call to
/// [setCssFont].
DomTextMetrics measureText(String text) {
return _canvasPool.context.measureText(text);
}
/// Draws text to the canvas starting at coordinate ([x], [y]).
///
/// The text is drawn starting at coordinates ([x], [y]). It uses the current
/// font set by the most recent call to [setCssFont].
void drawText(String text, double x, double y, {ui.PaintingStyle? style, List<ui.Shadow>? shadows}) {
final DomCanvasRenderingContext2D ctx = _canvasPool.context;
if (shadows != null) {
ctx.save();
for (final ui.Shadow shadow in shadows) {
ctx.shadowColor = shadow.color.toCssString();
ctx.shadowBlur = shadow.blurRadius;
ctx.shadowOffsetX = shadow.offset.dx;
ctx.shadowOffsetY = shadow.offset.dy;
if (style == ui.PaintingStyle.stroke) {
ctx.strokeText(text, x, y);
} else {
ctx.fillText(text, x, y);
}
}
ctx.restore();
}
if (style == ui.PaintingStyle.stroke) {
ctx.strokeText(text, x, y);
} else {
ctx.fillText(text, x, y);
}
}
@override
void drawParagraph(CanvasParagraph paragraph, ui.Offset offset) {
assert(paragraph.isLaidOut);
// Normally, text is composited as a plain HTML <p> tag. However, if a
// bitmap canvas was used for a preceding drawing command, then it's more
// efficient to continue compositing into the existing canvas, if possible.
// Whether it's possible to composite a paragraph into a 2D canvas depends
// on the following:
final bool canCompositeIntoBitmapCanvas =
// Cannot composite if the paragraph cannot be drawn into bitmap canvas
// in the first place.
paragraph.canDrawOnCanvas &&
// Cannot composite if there's no bitmap canvas to composite into.
// Creating a new bitmap canvas just to draw text doesn't make sense.
_canvasPool.hasCanvas &&
!_childOverdraw &&
// Bitmap canvas introduces correctness issues in the presence of SVG
// filters, so prefer plain HTML in this case.
!_renderStrategy.isInsideSvgFilterTree;
if (canCompositeIntoBitmapCanvas) {
paragraph.paint(this, offset);
return;
}
final DomElement paragraphElement =
drawParagraphElement(paragraph, offset);
if (_canvasPool.isClipped) {
final List<DomElement> clipElements = _clipContent(
_canvasPool.clipStack!,
paragraphElement,
offset,
_canvasPool.currentTransform);
for (final DomElement clipElement in clipElements) {
rootElement.append(clipElement);
_children.add(clipElement);
}
} else {
setElementTransform(
paragraphElement,
transformWithOffset(_canvasPool.currentTransform, offset).storage,
);
rootElement.append(paragraphElement);
}
_children.add(paragraphElement);
// If there is a prior sibling, such as an img, prevent a left/top shift.
paragraphElement.style
..left = '0px'
..top = '0px';
_closeCanvas();
}
/// Draws vertices on a gl context.
///
/// If both colors and textures are specified in paint data,
/// for [BlendMode.source] we skip colors and use textures,
/// for [BlendMode.dst] we only use colors and ignore textures.
/// We also skip paint shader when no texture is specified.
///
/// If no colors or textures are specified, stroke hairlines with
/// [Paint.color].
///
/// If colors are specified, convert them to premultiplied (alpha) colors
/// and use a SkTriColorShader to render.
@override
void drawVertices(SurfaceVertices vertices, ui.BlendMode blendMode,
SurfacePaintData paint) {
// TODO(ferhat): Implement shaders for [Paint.shader] and
// blendMode. https://github.com/flutter/flutter/issues/40096
// Move rendering to OffscreenCanvas so that transform is preserved
// as well.
assert(paint.shader == null || paint.shader is EngineImageShader,
'Linear/Radial/SweepGradient not supported yet');
final Int32List? colors = vertices.colors;
final ui.VertexMode mode = vertices.mode;
final DomCanvasRenderingContext2D ctx = _canvasPool.context;
if (colors == null &&
paint.style != ui.PaintingStyle.fill &&
paint.shader == null) {
final Float32List positions = mode == ui.VertexMode.triangles
? vertices.positions
: convertVertexPositions(mode, vertices.positions);
// Draw hairline for vertices if no vertex colors are specified.
save();
final ui.Color color = ui.Color(paint.color);
_canvasPool.contextHandle
..fillStyle = null
..strokeStyle = color.toCssString();
glRenderer!.drawHairline(ctx, positions);
restore();
return;
}
glRenderer!.drawVertices(ctx, widthInBitmapPixels, heightInBitmapPixels,
_canvasPool.currentTransform, vertices, blendMode, paint);
}
/// Stores paint data used by [drawPoints]. We cannot use the original paint
/// data object because painting style is determined by [ui.PointMode] and
/// not by [SurfacePaintData.style].
static final SurfacePaintData _drawPointsPaint = SurfacePaintData()
..strokeCap = ui.StrokeCap.round
..strokeJoin = ui.StrokeJoin.round
..blendMode = ui.BlendMode.srcOver;
@override
void drawPoints(
ui.PointMode pointMode, Float32List points, SurfacePaintData paint) {
if (pointMode == ui.PointMode.points) {
_drawPointsPaint.style = ui.PaintingStyle.stroke;
} else {
_drawPointsPaint.style = ui.PaintingStyle.fill;
}
_drawPointsPaint.color = paint.color;
_drawPointsPaint.maskFilter = paint.maskFilter;
final double dpr = EngineFlutterDisplay.instance.devicePixelRatio;
// Use a hairline (1 device pixel wide) when strokeWidth is not specified.
final double strokeWidth =
paint.strokeWidth == null ? 1.0 / dpr : paint.strokeWidth!;
_drawPointsPaint.strokeWidth = strokeWidth;
setUpPaint(_drawPointsPaint, null);
// Draw each point as a circle whose radius is half the stroke width.
_canvasPool.drawPoints(pointMode, points, strokeWidth / 2.0);
tearDownPaint();
}
@override
void endOfPaint() {
_canvasPool.endOfPaint();
_elementCache?.commitFrame();
if (_contains3dTransform && browserEngine == BrowserEngine.webkit) {
// Copy the children list to avoid concurrent modification.
final List<DomElement> children = rootElement.children.toList();
for (final DomElement element in children) {
final DomHTMLDivElement paintOrderElement = createDomHTMLDivElement()
..style.transform = 'translate3d(0,0,0)';
paintOrderElement.append(element);
rootElement.append(paintOrderElement);
_children.add(paintOrderElement);
}
}
final DomNode? firstChild = rootElement.firstChild;
if (firstChild != null) {
if (domInstanceOfString(firstChild, 'HTMLElement')) {
final DomHTMLElement maybeCanvas = firstChild as DomHTMLElement;
if (maybeCanvas.tagName.toLowerCase() == 'canvas') {
maybeCanvas.style.zIndex = '-1';
}
}
}
}
/// Computes paint bounds given [targetTransform] to completely cover window
/// viewport.
ui.Rect _computeScreenBounds(Matrix4 targetTransform) {
final Matrix4 inverted = targetTransform.clone()..invert();
final double dpr = EngineFlutterDisplay.instance.devicePixelRatio;
final double width = ui.window.physicalSize.width * dpr;
final double height = ui.window.physicalSize.height * dpr;
final Vector3 topLeft = inverted.perspectiveTransform(x: 0, y: 0, z: 0);
final Vector3 topRight = inverted.perspectiveTransform(x: width, y: 0, z: 0);
final Vector3 bottomRight =
inverted.perspectiveTransform(x: width, y: height, z: 0);
final Vector3 bottomLeft = inverted.perspectiveTransform(x: 0, y: height, z: 0);
return ui.Rect.fromLTRB(
math.min(topLeft.x,
math.min(topRight.x, math.min(bottomRight.x, bottomLeft.x))),
math.min(topLeft.y,
math.min(topRight.y, math.min(bottomRight.y, bottomLeft.y))),
math.max(topLeft.x,
math.max(topRight.x, math.max(bottomRight.x, bottomLeft.x))),
math.max(topLeft.y,
math.max(topRight.y, math.max(bottomRight.y, bottomLeft.y))),
);
}
/// Computes paint bounds to completely cover picture.
ui.Rect _computePictureBounds() {
return ui.Rect.fromLTRB(0, 0, _bounds.width, _bounds.height);
}
}
/// The CSS value for the `mix-blend-mode` CSS property.
///
/// This list includes values supported by SVG, but the two sets are not the same.
///
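/// For example, [ui.BlendMode.multiply] maps to 'multiply' and
/// [ui.BlendMode.srcOver] maps to 'source-over'; a null [blendMode] returns
/// null.
///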
/// See also:
///
/// * https://developer.mozilla.org/en-US/docs/Web/CSS/mix-blend-mode
/// * [blendModeToSvgEnum], which specializes on SVG blend modes
String? blendModeToCssMixBlendMode(ui.BlendMode? blendMode) {
if (blendMode == null) {
return null;
}
switch (blendMode) {
case ui.BlendMode.srcOver:
return 'source-over';
case ui.BlendMode.srcIn:
return 'source-in';
case ui.BlendMode.srcOut:
return 'source-out';
case ui.BlendMode.srcATop:
return 'source-atop';
case ui.BlendMode.dstOver:
return 'destination-over';
case ui.BlendMode.dstIn:
return 'destination-in';
case ui.BlendMode.dstOut:
return 'destination-out';
case ui.BlendMode.dstATop:
return 'destination-atop';
case ui.BlendMode.plus:
return 'lighten';
case ui.BlendMode.src:
return 'copy';
case ui.BlendMode.xor:
return 'xor';
case ui.BlendMode.multiply:
// Falling back to multiply, ignoring alpha channel.
// TODO(ferhat): only used for debug, find better fallback for web.
case ui.BlendMode.modulate:
return 'multiply';
case ui.BlendMode.screen:
return 'screen';
case ui.BlendMode.overlay:
return 'overlay';
case ui.BlendMode.darken:
return 'darken';
case ui.BlendMode.lighten:
return 'lighten';
case ui.BlendMode.colorDodge:
return 'color-dodge';
case ui.BlendMode.colorBurn:
return 'color-burn';
case ui.BlendMode.hardLight:
return 'hard-light';
case ui.BlendMode.softLight:
return 'soft-light';
case ui.BlendMode.difference:
return 'difference';
case ui.BlendMode.exclusion:
return 'exclusion';
case ui.BlendMode.hue:
return 'hue';
case ui.BlendMode.saturation:
return 'saturation';
case ui.BlendMode.color:
return 'color';
case ui.BlendMode.luminosity:
return 'luminosity';
default:
throw UnimplementedError(
'Flutter Web does not support the blend mode: $blendMode');
}
}
// Source: https://www.w3.org/TR/SVG11/filters.html#InterfaceSVGFEBlendElement
// These constant names deviate from Dart's camelCase convention on purpose to
// make it easier to search for them in W3 specs and in Chromium sources.
const int SVG_FEBLEND_MODE_UNKNOWN = 0;
const int SVG_FEBLEND_MODE_NORMAL = 1;
const int SVG_FEBLEND_MODE_MULTIPLY = 2;
const int SVG_FEBLEND_MODE_SCREEN = 3;
const int SVG_FEBLEND_MODE_DARKEN = 4;
const int SVG_FEBLEND_MODE_LIGHTEN = 5;
const int SVG_FEBLEND_MODE_OVERLAY = 6;
const int SVG_FEBLEND_MODE_COLOR_DODGE = 7;
const int SVG_FEBLEND_MODE_COLOR_BURN = 8;
const int SVG_FEBLEND_MODE_HARD_LIGHT = 9;
const int SVG_FEBLEND_MODE_SOFT_LIGHT = 10;
const int SVG_FEBLEND_MODE_DIFFERENCE = 11;
const int SVG_FEBLEND_MODE_EXCLUSION = 12;
const int SVG_FEBLEND_MODE_HUE = 13;
const int SVG_FEBLEND_MODE_SATURATION = 14;
const int SVG_FEBLEND_MODE_COLOR = 15;
const int SVG_FEBLEND_MODE_LUMINOSITY = 16;
// Source: https://github.com/chromium/chromium/blob/e1e495b29e1178a451f65980a6c4ae017c34dc94/third_party/blink/renderer/platform/graphics/graphics_types.cc#L55
const String kCompositeClear = 'clear';
const String kCompositeCopy = 'copy';
const String kCompositeSourceOver = 'source-over';
const String kCompositeSourceIn = 'source-in';
const String kCompositeSourceOut = 'source-out';
const String kCompositeSourceAtop = 'source-atop';
const String kCompositeDestinationOver = 'destination-over';
const String kCompositeDestinationIn = 'destination-in';
const String kCompositeDestinationOut = 'destination-out';
const String kCompositeDestinationAtop = 'destination-atop';
const String kCompositeXor = 'xor';
const String kCompositeLighter = 'lighter';
/// Compositing and blending operation in SVG.
///
/// Flutter's [BlendMode] flattens what SVG expresses as two orthogonal
/// properties, a composite operator and blend mode. Instances of this class
/// are returned from [blendModeToSvgEnum] by mapping Flutter's [BlendMode]
/// enum onto the SVG equivalent.
///
/// See also:
///
/// * https://www.w3.org/TR/compositing-1
/// * https://github.com/chromium/chromium/blob/e1e495b29e1178a451f65980a6c4ae017c34dc94/third_party/blink/renderer/platform/graphics/graphics_types.cc#L55
/// * https://github.com/chromium/chromium/blob/e1e495b29e1178a451f65980a6c4ae017c34dc94/third_party/blink/renderer/modules/canvas/canvas2d/base_rendering_context_2d.cc#L725
class SvgBlendMode {
const SvgBlendMode(this.compositeOperator, this.blendMode);
/// The name of the SVG composite operator.
///
/// If this mode represents a blend mode, this is set to [kCompositeSourceOver].
final String compositeOperator;
/// The identifier of the SVG blend mode.
///
/// If this mode represents a compositing operation, this is set to [SVG_FEBLEND_MODE_UNKNOWN].
final int blendMode;
}
/// Converts Flutter's [ui.BlendMode] to SVG's <compositing operation, blend mode> pair.
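///
/// For example, [ui.BlendMode.screen] maps to
/// `SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_SCREEN)`, while a pure
/// compositing operation such as [ui.BlendMode.srcIn] maps to
/// `SvgBlendMode(kCompositeSourceIn, SVG_FEBLEND_MODE_UNKNOWN)`.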
SvgBlendMode? blendModeToSvgEnum(ui.BlendMode? blendMode) {
if (blendMode == null) {
return null;
}
switch (blendMode) {
case ui.BlendMode.clear:
return const SvgBlendMode(kCompositeClear, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.srcOver:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.srcIn:
return const SvgBlendMode(kCompositeSourceIn, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.srcOut:
return const SvgBlendMode(kCompositeSourceOut, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.srcATop:
return const SvgBlendMode(kCompositeSourceAtop, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.dstOver:
return const SvgBlendMode(kCompositeDestinationOver, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.dstIn:
return const SvgBlendMode(kCompositeDestinationIn, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.dstOut:
return const SvgBlendMode(kCompositeDestinationOut, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.dstATop:
return const SvgBlendMode(kCompositeDestinationAtop, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.plus:
return const SvgBlendMode(kCompositeLighter, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.src:
return const SvgBlendMode(kCompositeCopy, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.xor:
return const SvgBlendMode(kCompositeXor, SVG_FEBLEND_MODE_UNKNOWN);
case ui.BlendMode.multiply:
// Falling back to multiply, ignoring alpha channel.
// TODO(ferhat): only used for debug, find better fallback for web.
case ui.BlendMode.modulate:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_MULTIPLY);
case ui.BlendMode.screen:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_SCREEN);
case ui.BlendMode.overlay:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_OVERLAY);
case ui.BlendMode.darken:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_DARKEN);
case ui.BlendMode.lighten:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_LIGHTEN);
case ui.BlendMode.colorDodge:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_COLOR_DODGE);
case ui.BlendMode.colorBurn:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_COLOR_BURN);
case ui.BlendMode.hardLight:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_HARD_LIGHT);
case ui.BlendMode.softLight:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_SOFT_LIGHT);
case ui.BlendMode.difference:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_DIFFERENCE);
case ui.BlendMode.exclusion:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_EXCLUSION);
case ui.BlendMode.hue:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_HUE);
case ui.BlendMode.saturation:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_SATURATION);
case ui.BlendMode.color:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_COLOR);
case ui.BlendMode.luminosity:
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_LUMINOSITY);
default:
assert(
false,
'Flutter Web does not support the blend mode: $blendMode',
);
return const SvgBlendMode(kCompositeSourceOver, SVG_FEBLEND_MODE_NORMAL);
}
}
String? stringForStrokeCap(ui.StrokeCap? strokeCap) {
if (strokeCap == null) {
return null;
}
switch (strokeCap) {
case ui.StrokeCap.butt:
return 'butt';
case ui.StrokeCap.round:
return 'round';
case ui.StrokeCap.square:
default:
return 'square';
}
}
String stringForStrokeJoin(ui.StrokeJoin strokeJoin) {
switch (strokeJoin) {
case ui.StrokeJoin.round:
return 'round';
case ui.StrokeJoin.bevel:
return 'bevel';
case ui.StrokeJoin.miter:
default:
return 'miter';
}
}
/// Clips the content element against a stack of clip operations and returns
/// root of a tree that contains content node.
///
/// The stack of clipping rectangles generates an element that either uses
/// overflow:hidden with bounds to clip the child or sets a clip-path to clip
/// its contents. The clipping rectangles are nested and returned together
/// with a list of svg elements that provide clip-paths.
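///
/// For a clip stack with two rect entries, the generated tree looks roughly
/// like this (styles elided):
///
///   <div style="overflow: hidden; transform: ...">      <!-- clip 1 -->
///     <div style="transform: ...">                      <!-- reverse transform -->
///       <div style="overflow: hidden; transform: ...">  <!-- clip 2 -->
///         <div style="transform: ...">
///           <content>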
List<DomElement> _clipContent(List<SaveClipEntry> clipStack,
DomElement content, ui.Offset offset, Matrix4 currentTransform) {
DomElement? root, curElement;
final List<DomElement> clipDefs = <DomElement>[];
final int len = clipStack.length;
for (int clipIndex = 0; clipIndex < len; clipIndex++) {
final SaveClipEntry entry = clipStack[clipIndex];
final DomHTMLElement newElement = createDomHTMLDivElement();
newElement.style.position = 'absolute';
applyWebkitClipFix(newElement);
if (root == null) {
root = newElement;
} else {
curElement!.append(newElement);
}
curElement = newElement;
final ui.Rect? rect = entry.rect;
Matrix4 newClipTransform = entry.currentTransform;
final TransformKind transformKind =
transformKindOf(newClipTransform.storage);
final bool requiresTransformStyle = transformKind == TransformKind.complex;
if (rect != null) {
final double clipOffsetX = rect.left;
final double clipOffsetY = rect.top;
newClipTransform = newClipTransform.clone()
..translate(clipOffsetX, clipOffsetY);
curElement.style
..overflow = 'hidden'
..width = '${rect.right - clipOffsetX}px'
..height = '${rect.bottom - clipOffsetY}px';
setElementTransform(curElement, newClipTransform.storage);
} else if (entry.rrect != null) {
final ui.RRect roundRect = entry.rrect!;
final String borderRadius =
'${roundRect.tlRadiusX}px ${roundRect.trRadiusX}px '
'${roundRect.brRadiusX}px ${roundRect.blRadiusX}px';
final double clipOffsetX = roundRect.left;
final double clipOffsetY = roundRect.top;
newClipTransform = newClipTransform.clone()
..translate(clipOffsetX, clipOffsetY);
curElement.style
..borderRadius = borderRadius
..overflow = 'hidden'
..width = '${roundRect.right - clipOffsetX}px'
..height = '${roundRect.bottom - clipOffsetY}px';
setElementTransform(curElement, newClipTransform.storage);
} else if (entry.path != null) {
// Clipping optimization when we know that the path is an oval.
// We use a div with border-radius set to 50% with a size that is
// set to path bounds and set overflow to hidden.
final SurfacePath surfacePath = entry.path! as SurfacePath;
if (surfacePath.pathRef.isOval != -1) {
final ui.Rect ovalBounds = surfacePath.getBounds();
final double clipOffsetX = ovalBounds.left;
final double clipOffsetY = ovalBounds.top;
newClipTransform = newClipTransform.clone()
..translate(clipOffsetX, clipOffsetY);
curElement.style
..overflow = 'hidden'
..width = '${ovalBounds.width}px'
..height = '${ovalBounds.height}px'
..borderRadius = '50%';
setElementTransform(curElement, newClipTransform.storage);
} else {
// Arbitrary path clipping.
curElement.style
..transform = matrix4ToCssTransform(newClipTransform)
..transformOrigin = '0 0 0';
final DomElement clipElement =
createSvgClipDef(curElement, entry.path!);
clipDefs.add(clipElement);
}
}
// Reverse the transform of the clipping element so children can use
// effective transform to render.
// TODO(ferhat): When we have more than a single clip element,
// reduce number of div nodes by merging (multiplying transforms).
final DomElement reverseTransformDiv = createDomHTMLDivElement();
reverseTransformDiv.style.position = 'absolute';
setElementTransform(
reverseTransformDiv,
(newClipTransform.clone()..invert()).storage,
);
if (requiresTransformStyle) {
// Instead of flattening matrix3d, preserve so it can be reversed.
curElement.style.transformStyle = 'preserve-3d';
reverseTransformDiv.style.transformStyle = 'preserve-3d';
}
curElement.append(reverseTransformDiv);
curElement = reverseTransformDiv;
}
root!.style.position = 'absolute';
curElement!.append(content);
setElementTransform(
content,
transformWithOffset(currentTransform, offset).storage,
);
return <DomElement>[root, ...clipDefs];
}
/// Converts a [maskFilter] to the value to be used on a `<canvas>`.
///
/// Only supported in non-WebKit browsers.
String maskFilterToCanvasFilter(ui.MaskFilter? maskFilter) {
assert(
browserEngine != BrowserEngine.webkit,
'WebKit (Safari) does not support `filter` canvas property.',
);
if (maskFilter != null) {
// Multiply by device-pixel ratio because the canvas' pixel width and height
// are larger than its CSS width and height by device-pixel ratio.
return 'blur(${maskFilter.webOnlySigma * EngineFlutterDisplay.instance.devicePixelRatio}px)';
} else {
return 'none';
}
}
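// A minimal usage sketch (not part of the original file; the `ctx` variable
// and paint values below are hypothetical): a blur MaskFilter is converted to
// a CSS filter string and assigned to the 2D context before drawing.
//
//   final ui.MaskFilter filter = ui.MaskFilter.blur(ui.BlurStyle.normal, 2.0);
//   // Assuming a device-pixel ratio of 2.0, this evaluates to 'blur(4.0px)'.
//   ctx.filter = maskFilterToCanvasFilter(filter);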
| engine/lib/web_ui/lib/src/engine/html/bitmap_canvas.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/bitmap_canvas.dart",
"repo_id": "engine",
"token_count": 20336
} | 292 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import 'conic.dart';
import 'path_ref.dart';
import 'path_utils.dart';
/// Converts [path] to SVG path syntax to be used as "d" attribute in path
/// element.
String pathToSvg(PathRef pathRef, {double offsetX = 0, double offsetY = 0}) {
final StringBuffer buffer = StringBuffer();
final PathRefIterator iter = PathRefIterator(pathRef);
int verb = 0;
final Float32List outPts = Float32List(PathRefIterator.kMaxBufferSize);
while ((verb = iter.next(outPts)) != SPath.kDoneVerb) {
switch (verb) {
case SPath.kMoveVerb:
buffer.write('M ${outPts[0] + offsetX} ${outPts[1] + offsetY}');
case SPath.kLineVerb:
buffer.write('L ${outPts[2] + offsetX} ${outPts[3] + offsetY}');
case SPath.kCubicVerb:
buffer.write('C ${outPts[2] + offsetX} ${outPts[3] + offsetY} '
'${outPts[4] + offsetX} ${outPts[5] + offsetY} ${outPts[6] + offsetX} ${outPts[7] + offsetY}');
case SPath.kQuadVerb:
buffer.write('Q ${outPts[2] + offsetX} ${outPts[3] + offsetY} '
'${outPts[4] + offsetX} ${outPts[5] + offsetY}');
case SPath.kConicVerb:
final double w = iter.conicWeight;
final Conic conic = Conic(outPts[0], outPts[1], outPts[2], outPts[3],
outPts[4], outPts[5], w);
final List<ui.Offset> points = conic.toQuads();
final int len = points.length;
for (int i = 1; i < len; i += 2) {
final double p1x = points[i].dx;
final double p1y = points[i].dy;
final double p2x = points[i + 1].dx;
final double p2y = points[i + 1].dy;
buffer.write('Q ${p1x + offsetX} ${p1y + offsetY} '
'${p2x + offsetX} ${p2y + offsetY}');
}
case SPath.kCloseVerb:
buffer.write('Z');
default:
throw UnimplementedError('Unknown path verb $verb');
}
}
return buffer.toString();
}
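// A small usage sketch (an assumption for illustration, not part of this
// file): SurfacePath is the engine's ui.Path implementation and exposes its
// PathRef via `pathRef`, which can be fed directly to pathToSvg.
//
//   final SurfacePath path = SurfacePath()
//     ..moveTo(0, 0)
//     ..lineTo(10, 0)
//     ..lineTo(10, 10)
//     ..close();
//   // Produces something like 'M 0.0 0.0L 10.0 0.0L 10.0 10.0Z'.
//   final String d = pathToSvg(path.pathRef);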
| engine/lib/web_ui/lib/src/engine/html/path/path_to_svg.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/path/path_to_svg.dart",
"repo_id": "engine",
"token_count": 946
} | 293 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import '../../browser_detection.dart';
import '../../color_filter.dart';
import '../../dom.dart';
import '../../safe_browser_api.dart';
import '../../util.dart';
import '../../validators.dart';
import '../../vector_math.dart';
import '../color_filter.dart';
import '../path/path_utils.dart';
import '../render_vertices.dart';
import '../resource_manager.dart';
import 'normalized_gradient.dart';
import 'shader_builder.dart';
import 'vertex_shaders.dart';
const double kFltEpsilon = 1.19209290E-07; // == 1 / (2 ^ 23)
const double kFltEpsilonSquared = 1.19209290E-07 * 1.19209290E-07;
class SharedCanvas {
OffScreenCanvas? _canvas;
bool _checkedOut = false;
GlContext checkOutContext(int width, int height) {
assert(!_checkedOut);
_checkedOut = true;
if (_canvas == null) {
_canvas = OffScreenCanvas(width, height);
} else {
_canvas!.resize(width, height);
}
return GlContext(_canvas!);
}
void checkInContext() {
assert(_checkedOut);
_checkedOut = false;
}
}
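// Typical usage pattern of the shared canvas above (a sketch; the sizes are
// arbitrary): check out a GL context sized for the gradient bitmap, render
// into it, then check it back in so the next gradient reuses the same
// offscreen canvas.
//
//   final GlContext gl = _sharedCanvas.checkOutContext(32, 32);
//   try {
//     // ... render the gradient into `gl` ...
//   } finally {
//     _sharedCanvas.checkInContext();
//   }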
SharedCanvas _sharedCanvas = SharedCanvas();
abstract class EngineGradient implements ui.Gradient {
/// Hidden constructor to prevent subclassing.
EngineGradient._();
/// Creates a fill style to be used in painting.
Object createPaintStyle(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density);
/// Creates a CanvasImageSource to paint gradient.
Object createImageBitmap(
ui.Rect? shaderBounds, double density, bool createDataUrl);
@override
bool debugDisposed = false;
@override
void dispose() {}
@override
String toString() => 'Gradient()';
}
class GradientSweep extends EngineGradient {
GradientSweep(this.center, this.colors, this.colorStops, this.tileMode,
this.startAngle, this.endAngle, this.matrix4)
: assert(offsetIsValid(center)),
super._() {
validateColorStops(colors, colorStops);
}
@override
Object createImageBitmap(
ui.Rect? shaderBounds, double density, bool createDataUrl) {
assert(shaderBounds != null);
final int widthInPixels = shaderBounds!.width.ceil();
final int heightInPixels = shaderBounds.height.ceil();
assert(widthInPixels > 0 && heightInPixels > 0);
initWebGl();
// Render gradient into a bitmap and create a canvas pattern.
final GlContext gl = _sharedCanvas.checkOutContext(widthInPixels, heightInPixels);
gl.setViewportSize(widthInPixels, heightInPixels);
final NormalizedGradient normalizedGradient =
NormalizedGradient(colors, stops: colorStops);
final GlProgram glProgram = gl.cacheProgram(VertexShaders.writeBaseVertexShader(),
_createSweepFragmentShader(normalizedGradient, tileMode));
gl.useProgram(glProgram);
final Object tileOffset =
gl.getUniformLocation(glProgram.program, 'u_tile_offset');
final double centerX = (center.dx - shaderBounds.left) / (shaderBounds.width);
final double centerY = (center.dy - shaderBounds.top) / (shaderBounds.height);
gl.setUniform2f(tileOffset, 2 * (shaderBounds.width * (centerX - 0.5)),
2 * (shaderBounds.height * (0.5 - centerY)));
final Object angleRange = gl.getUniformLocation(glProgram.program, 'angle_range');
gl.setUniform2f(angleRange, startAngle, endAngle);
normalizedGradient.setupUniforms(gl, glProgram);
final Object gradientMatrix =
gl.getUniformLocation(glProgram.program, 'm_gradient');
final Matrix4 gradientTransform = Matrix4.identity();
if (matrix4 != null) {
final Matrix4 m4 = Matrix4.zero()
..copyInverse(Matrix4.fromFloat32List(matrix4!));
gradientTransform.translate(-center.dx, -center.dy);
gradientTransform.multiply(m4);
gradientTransform.translate(center.dx, center.dy);
}
gl.setUniformMatrix4fv(gradientMatrix, false, gradientTransform.storage);
final Object result = () {
if (createDataUrl) {
return glRenderer!.drawRectToImageUrl(
ui.Rect.fromLTWH(0, 0, shaderBounds.width, shaderBounds.height),
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels);
} else {
return glRenderer!.drawRect(
ui.Rect.fromLTWH(0, 0, shaderBounds.width, shaderBounds.height),
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels)!;
}
}();
_sharedCanvas.checkInContext();
return result;
}
@override
Object createPaintStyle(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
final Object imageBitmap = createImageBitmap(shaderBounds, density, false);
return ctx!.createPattern(imageBitmap, 'no-repeat')!;
}
String _createSweepFragmentShader(
NormalizedGradient gradient, ui.TileMode tileMode) {
final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
builder.floatPrecision = ShaderPrecision.kMedium;
builder.addIn(ShaderType.kVec4, name: 'v_color');
builder.addUniform(ShaderType.kVec2, name: 'u_resolution');
builder.addUniform(ShaderType.kVec2, name: 'u_tile_offset');
builder.addUniform(ShaderType.kVec2, name: 'angle_range');
builder.addUniform(ShaderType.kMat4, name: 'm_gradient');
final ShaderDeclaration fragColor = builder.fragmentColor;
final ShaderMethod method = builder.addMethod('main');
// Sweep gradient
method.addStatement('vec2 center = 0.5 * (u_resolution + u_tile_offset);');
method.addStatement(
'vec4 localCoord = m_gradient * vec4(gl_FragCoord.x - center.x, center.y - gl_FragCoord.y, 0, 1);');
method.addStatement(
'float angle = atan(-localCoord.y, -localCoord.x) + ${math.pi};');
method.addStatement('float sweep = angle_range.y - angle_range.x;');
method.addStatement('angle = (angle - angle_range.x) / sweep;');
method.addStatement(
'float st = angle;');
final String probeName =
_writeSharedGradientShader(builder, method, gradient, tileMode);
method.addStatement('${fragColor.name} = $probeName * scale + bias;');
final String shader = builder.build();
return shader;
}
final ui.Offset center;
final List<ui.Color> colors;
final List<double>? colorStops;
final ui.TileMode tileMode;
final double startAngle;
final double endAngle;
final Float32List? matrix4;
}
class GradientLinear extends EngineGradient {
GradientLinear(
this.from,
this.to,
this.colors,
this.colorStops,
this.tileMode,
Float32List? matrix,
) : assert(offsetIsValid(from)),
assert(offsetIsValid(to)),
matrix4 = matrix == null ? null : FastMatrix32(matrix),
super._() {
// ignore: prefer_asserts_in_initializer_lists
assert(() {
validateColorStops(colors, colorStops);
return true;
}());
}
final ui.Offset from;
final ui.Offset to;
final List<ui.Color> colors;
final List<double>? colorStops;
final ui.TileMode tileMode;
final FastMatrix32? matrix4;
@override
Object createPaintStyle(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
if (tileMode == ui.TileMode.clamp || tileMode == ui.TileMode.decal) {
return _createCanvasGradient(ctx, shaderBounds, density);
} else {
return _createGlGradient(ctx, shaderBounds, density);
}
}
DomCanvasGradient _createCanvasGradient(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
final FastMatrix32? matrix4 = this.matrix4;
DomCanvasGradient gradient;
final double offsetX = shaderBounds!.left;
final double offsetY = shaderBounds.top;
if (matrix4 != null) {
// The matrix is relative to shaderBounds so we shift center by
// shaderBounds top-left origin.
final double centerX = (from.dx + to.dx) / 2.0 - shaderBounds.left;
final double centerY = (from.dy + to.dy) / 2.0 - shaderBounds.top;
matrix4.transform(from.dx - centerX, from.dy - centerY);
final double fromX = matrix4.transformedX + centerX;
final double fromY = matrix4.transformedY + centerY;
matrix4.transform(to.dx - centerX, to.dy - centerY);
gradient = ctx!.createLinearGradient(
fromX - offsetX,
fromY - offsetY,
matrix4.transformedX + centerX - offsetX,
matrix4.transformedY + centerY - offsetY);
} else {
gradient = ctx!.createLinearGradient(from.dx - offsetX, from.dy - offsetY,
to.dx - offsetX, to.dy - offsetY);
}
_addColorStopsToCanvasGradient(
gradient, colors, colorStops, tileMode == ui.TileMode.decal);
return gradient;
}
@override
Object createImageBitmap(
ui.Rect? shaderBounds, double density, bool createDataUrl) {
assert(shaderBounds != null);
final int widthInPixels = shaderBounds!.width.ceil();
final int heightInPixels = shaderBounds.height.ceil();
assert(widthInPixels > 0 && heightInPixels > 0);
initWebGl();
// Render gradient into a bitmap and create a canvas pattern.
final GlContext gl = _sharedCanvas.checkOutContext(widthInPixels, heightInPixels);
gl.setViewportSize(widthInPixels, heightInPixels);
final NormalizedGradient normalizedGradient =
NormalizedGradient(colors, stops: colorStops);
final GlProgram glProgram = gl.cacheProgram(VertexShaders.writeBaseVertexShader(),
_createLinearFragmentShader(normalizedGradient, tileMode));
gl.useProgram(glProgram);
// Setup from/to uniforms.
//
// From/to is relative to shaderBounds.
//
// To compute t value between 0..1 for any point on the screen,
// we need to use from,to point pair to construct a matrix that will
// take any fragment coordinate and transform it to a t value.
//
// We compute the matrix by:
// 1- Shift from,to vector to origin.
// 2- Rotate the vector to align with x axis.
// 3- Scale it to unit vector.
final double fromX = from.dx;
final double fromY = from.dy;
final double toX = to.dx;
final double toY = to.dy;
final double dx = toX - fromX;
final double dy = toY - fromY;
final double length = math.sqrt(dx * dx + dy * dy);
// sin(theta) = dy / length.
// cos(theta) = dx / length.
// Flip dy for gl flip.
final double sinVal = length < kFltEpsilon ? 0 : -dy / length;
final double cosVal = length < kFltEpsilon ? 1 : dx / length;
// If tile mode is repeated we need to shift the center of from->to
// vector to the center of shader bounds.
final bool isRepeated = tileMode != ui.TileMode.clamp;
final double originX = isRepeated
? (shaderBounds.width / 2)
: (fromX + toX) / 2.0 - shaderBounds.left;
final double originY = isRepeated
? (shaderBounds.height / 2)
: (fromY + toY) / 2.0 - shaderBounds.top;
final Matrix4 originTranslation =
Matrix4.translationValues(-originX, -originY, 0);
// Rotate around Z axis.
final Matrix4 rotationZ = Matrix4.identity();
final Float32List storage = rotationZ.storage;
storage[0] = cosVal;
// Sign is flipped since gl coordinate system is flipped around y axis.
storage[1] = sinVal;
storage[4] = -sinVal;
storage[5] = cosVal;
final Matrix4 gradientTransform = Matrix4.identity();
// We compute location based on gl_FragCoord to center distance which
// returns 0.0 at center. To make sure we align center of gradient to this
// point, we shift by 0.5 to get st value for center of gradient.
gradientTransform.translate(0.5);
if (length > kFltEpsilon) {
gradientTransform.scale(1.0 / length);
}
if (matrix4 != null) {
// Flutter GradientTransform is defined in shaderBounds coordinate system
// with flipped y axis.
// We flip y axis, translate to center, multiply matrix and translate
// and flip back so it is applied correctly.
final Matrix4 m4 = Matrix4.zero()
..copyInverse(Matrix4.fromFloat32List(matrix4!.matrix));
final ui.Offset center = shaderBounds.center;
gradientTransform.translate(-center.dx, -center.dy);
gradientTransform.multiply(m4);
gradientTransform.translate(center.dx, center.dy);
}
gradientTransform.multiply(rotationZ);
gradientTransform.multiply(originTranslation);
// Setup gradient uniforms for t search.
normalizedGradient.setupUniforms(gl, glProgram);
// Setup matrix transform uniform.
final Object gradientMatrix =
gl.getUniformLocation(glProgram.program, 'm_gradient');
gl.setUniformMatrix4fv(gradientMatrix, false, gradientTransform.storage);
final Object uRes = gl.getUniformLocation(glProgram.program, 'u_resolution');
gl.setUniform2f(uRes, widthInPixels.toDouble(), heightInPixels.toDouble());
final Object result = () {
if (createDataUrl) {
return glRenderer!.drawRectToImageUrl(
ui.Rect.fromLTWH(0, 0, shaderBounds.width,
shaderBounds.height) /* !! shaderBounds */,
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels,
);
} else {
return glRenderer!.drawRect(
ui.Rect.fromLTWH(0, 0, shaderBounds.width,
shaderBounds.height) /* !! shaderBounds */,
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels,
)!;
}
}();
_sharedCanvas.checkInContext();
return result;
}
/// Creates a linear gradient with tiling repeat or mirror.
DomCanvasPattern _createGlGradient(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
final Object imageBitmap = createImageBitmap(shaderBounds, density, false);
return ctx!.createPattern(imageBitmap, 'no-repeat')!;
}
String _createLinearFragmentShader(
NormalizedGradient gradient, ui.TileMode tileMode) {
final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
builder.floatPrecision = ShaderPrecision.kMedium;
builder.addIn(ShaderType.kVec4, name: 'v_color');
builder.addUniform(ShaderType.kVec2, name: 'u_resolution');
builder.addUniform(ShaderType.kMat4, name: 'm_gradient');
final ShaderDeclaration fragColor = builder.fragmentColor;
final ShaderMethod method = builder.addMethod('main');
// Linear gradient.
// Multiply with m_gradient transform to convert from fragment coordinate to
// distance on the from-to line.
method.addStatement('vec4 localCoord = m_gradient * vec4(gl_FragCoord.x, '
'u_resolution.y - gl_FragCoord.y, 0, 1);');
method.addStatement('float st = localCoord.x;');
final String probeName =
_writeSharedGradientShader(builder, method, gradient, tileMode);
method.addStatement('${fragColor.name} = $probeName * scale + bias;');
final String shader = builder.build();
return shader;
}
}
void _addColorStopsToCanvasGradient(DomCanvasGradient gradient,
List<ui.Color> colors, List<double>? colorStops, bool isDecal) {
double scale, offset;
if (isDecal) {
scale = 0.999;
offset = (1.0 - scale) / 2.0;
gradient.addColorStop(0, '#00000000');
} else {
scale = 1.0;
offset = 0.0;
}
if (colorStops == null) {
assert(colors.length == 2);
gradient.addColorStop(offset, colors[0].toCssString());
gradient.addColorStop(1 - offset, colors[1].toCssString());
} else {
for (int i = 0; i < colors.length; i++) {
final double colorStop = colorStops[i].clamp(0.0, 1.0);
gradient.addColorStop(
colorStop * scale + offset, colors[i].toCssString());
}
}
if (isDecal) {
gradient.addColorStop(1, '#00000000');
}
}
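// Worked example of the decal handling above: with colors [A, B] and stops
// [0.0, 1.0], decal mode emits fully transparent stops at 0 and 1 and
// squeezes the real stops to 0.0005 and 0.9995 (0.999 * stop + 0.0005), so
// the gradient fades to transparency at its edges instead of clamping.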
/// Writes shader code to map fragment value to gradient color.
///
/// Returns the name of the gradient threshold variable to use to compute the color.
String _writeSharedGradientShader(ShaderBuilder builder, ShaderMethod method,
NormalizedGradient gradient, ui.TileMode tileMode) {
method.addStatement('vec4 bias;');
method.addStatement('vec4 scale;');
// Write uniforms for each threshold, bias and scale.
for (int i = 0; i < (gradient.thresholdCount - 1) ~/ 4 + 1; i++) {
builder.addUniform(ShaderType.kVec4, name: 'threshold_$i');
}
for (int i = 0; i < gradient.thresholdCount; i++) {
builder.addUniform(ShaderType.kVec4, name: 'bias_$i');
builder.addUniform(ShaderType.kVec4, name: 'scale_$i');
}
// Use the st variable directly for decal tiling; for the other tile modes,
// write code that computes tiled_st from it.
String probeName = 'st';
switch (tileMode) {
case ui.TileMode.clamp:
method.addStatement('float tiled_st = clamp(st, 0.0, 1.0);');
probeName = 'tiled_st';
case ui.TileMode.decal:
break;
case ui.TileMode.repeated:
// st represents our distance from center. Flutter maps the center to
// center of gradient ramp so we need to add 0.5 to make sure repeated
// pattern center is at origin.
method.addStatement('float tiled_st = fract(st);');
probeName = 'tiled_st';
case ui.TileMode.mirror:
method.addStatement('float t_1 = (st - 1.0);');
method.addStatement(
'float tiled_st = abs((t_1 - 2.0 * floor(t_1 * 0.5)) - 1.0);');
probeName = 'tiled_st';
}
writeUnrolledBinarySearch(method, 0, gradient.thresholdCount - 1,
probe: probeName,
sourcePrefix: 'threshold',
biasName: 'bias',
scaleName: 'scale');
if (tileMode == ui.TileMode.decal) {
method.addStatement('if (st < 0.0 || st > 1.0) {');
method.addStatement(' ${builder.fragmentColor.name} = vec4(0, 0, 0, 0);');
method.addStatement(' return;');
method.addStatement('}');
}
return probeName;
}
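// Worked example of the tiling math above (values chosen for illustration):
// for repeated tiling, st = 2.25 gives fract(2.25) = 0.25. For mirrored
// tiling, st = 1.3 gives t_1 = 0.3, floor(0.15) = 0, so
// abs((0.3 - 0.0) - 1.0) = 0.7, and st = 2.5 maps to 0.5; the ramp runs
// forward on even tiles and backward on odd ones.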
class GradientRadial extends EngineGradient {
GradientRadial(this.center, this.radius, this.colors, this.colorStops,
this.tileMode, this.matrix4)
: super._();
final ui.Offset center;
final double radius;
final List<ui.Color> colors;
final List<double>? colorStops;
final ui.TileMode tileMode;
final Float32List? matrix4;
@override
Object createPaintStyle(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
if (matrix4 == null && (tileMode == ui.TileMode.clamp || tileMode == ui.TileMode.decal)) {
return _createCanvasGradient(ctx, shaderBounds, density);
} else {
return _createGlGradient(ctx, shaderBounds, density);
}
}
Object _createCanvasGradient(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
final double offsetX = shaderBounds!.left;
final double offsetY = shaderBounds.top;
final DomCanvasGradient gradient = ctx!.createRadialGradient(
center.dx - offsetX,
center.dy - offsetY,
0,
center.dx - offsetX,
center.dy - offsetY,
radius);
_addColorStopsToCanvasGradient(
gradient, colors, colorStops, tileMode == ui.TileMode.decal);
return gradient;
}
@override
Object createImageBitmap(
ui.Rect? shaderBounds, double density, bool createDataUrl) {
assert(shaderBounds != null);
final int widthInPixels = shaderBounds!.width.ceil();
final int heightInPixels = shaderBounds.height.ceil();
assert(widthInPixels > 0 && heightInPixels > 0);
initWebGl();
// Render gradient into a bitmap and create a canvas pattern.
final GlContext gl = _sharedCanvas.checkOutContext(widthInPixels, heightInPixels);
gl.setViewportSize(widthInPixels, heightInPixels);
final NormalizedGradient normalizedGradient =
NormalizedGradient(colors, stops: colorStops);
final GlProgram glProgram = gl.cacheProgram(
VertexShaders.writeBaseVertexShader(),
_createRadialFragmentShader(
normalizedGradient, shaderBounds, tileMode));
gl.useProgram(glProgram);
final Object tileOffset =
gl.getUniformLocation(glProgram.program, 'u_tile_offset');
final double centerX = (center.dx - shaderBounds.left) / (shaderBounds.width);
final double centerY = (center.dy - shaderBounds.top) / (shaderBounds.height);
gl.setUniform2f(tileOffset, 2 * (shaderBounds.width * (centerX - 0.5)),
2 * (shaderBounds.height * (0.5 - centerY)));
final Object radiusUniform = gl.getUniformLocation(glProgram.program, 'u_radius');
gl.setUniform1f(radiusUniform, radius);
normalizedGradient.setupUniforms(gl, glProgram);
final Object gradientMatrix =
gl.getUniformLocation(glProgram.program, 'm_gradient');
final Matrix4 gradientTransform = Matrix4.identity();
if (matrix4 != null) {
final Matrix4 m4 = Matrix4.zero()
..copyInverse(Matrix4.fromFloat32List(matrix4!));
gradientTransform.translate(-center.dx, -center.dy);
gradientTransform.multiply(m4);
gradientTransform.translate(center.dx, center.dy);
}
gl.setUniformMatrix4fv(gradientMatrix, false, gradientTransform.storage);
final Object result = () {
if (createDataUrl) {
return glRenderer!.drawRectToImageUrl(
ui.Rect.fromLTWH(0, 0, shaderBounds.width, shaderBounds.height),
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels);
} else {
return glRenderer!.drawRect(
ui.Rect.fromLTWH(0, 0, shaderBounds.width, shaderBounds.height),
gl,
glProgram,
normalizedGradient,
widthInPixels,
heightInPixels)!;
}
}();
_sharedCanvas.checkInContext();
return result;
}
/// Creates a radial gradient with tiling repeat or mirror.
DomCanvasPattern _createGlGradient(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
final Object imageBitmap = createImageBitmap(shaderBounds, density, false);
return ctx!.createPattern(imageBitmap, 'no-repeat')!;
}
String _createRadialFragmentShader(
NormalizedGradient gradient, ui.Rect shaderBounds, ui.TileMode tileMode) {
final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
builder.floatPrecision = ShaderPrecision.kMedium;
builder.addIn(ShaderType.kVec4, name: 'v_color');
builder.addUniform(ShaderType.kVec2, name: 'u_resolution');
builder.addUniform(ShaderType.kVec2, name: 'u_tile_offset');
builder.addUniform(ShaderType.kFloat, name: 'u_radius');
builder.addUniform(ShaderType.kMat4, name: 'm_gradient');
final ShaderDeclaration fragColor = builder.fragmentColor;
final ShaderMethod method = builder.addMethod('main');
// Radial gradient.
method.addStatement('vec2 center = 0.5 * (u_resolution + u_tile_offset);');
method.addStatement(
'vec4 localCoord = m_gradient * vec4(gl_FragCoord.x - center.x, center.y - gl_FragCoord.y, 0, 1);');
method.addStatement('float dist = length(localCoord);');
method.addStatement(
'float st = abs(dist / u_radius);');
final String probeName =
_writeSharedGradientShader(builder, method, gradient, tileMode);
method.addStatement('${fragColor.name} = $probeName * scale + bias;');
final String shader = builder.build();
return shader;
}
}
// TODO(ferhat): Implement focal https://github.com/flutter/flutter/issues/76643.
class GradientConical extends GradientRadial {
GradientConical(
this.focal,
this.focalRadius,
ui.Offset center,
double radius,
List<ui.Color> colors,
List<double>? colorStops,
ui.TileMode tileMode,
Float32List? matrix4)
: super(center, radius, colors, colorStops, tileMode, matrix4);
final ui.Offset focal;
final double focalRadius;
@override
Object createPaintStyle(DomCanvasRenderingContext2D? ctx,
ui.Rect? shaderBounds, double density) {
if ((tileMode == ui.TileMode.clamp || tileMode == ui.TileMode.decal) &&
focalRadius == 0.0 &&
focal == ui.Offset.zero) {
return _createCanvasGradient(ctx, shaderBounds, density);
} else {
initWebGl();
return _createGlGradient(ctx, shaderBounds, density);
}
}
@override
String _createRadialFragmentShader(
NormalizedGradient gradient, ui.Rect shaderBounds, ui.TileMode tileMode) {
/// If distance between centers is nearly zero we can pretend we're radial
/// to prevent divide by zero in computing gradient.
final double centerDistanceX = center.dx - focal.dx;
final double centerDistanceY = center.dy - focal.dy;
final double centerDistanceSq =
centerDistanceX * centerDistanceX + centerDistanceY * centerDistanceY;
if (centerDistanceSq < kFltEpsilonSquared) {
return super
._createRadialFragmentShader(gradient, shaderBounds, tileMode);
}
final double centerDistance = math.sqrt(centerDistanceSq);
double r0 = focalRadius / centerDistance;
double r1 = radius / centerDistance;
double fFocalX = r0 / (r0 - r1);
if ((fFocalX - 1).abs() < SPath.scalarNearlyZero) {
// swap r0, r1
final double temp = r0;
r0 = r1;
r1 = temp;
fFocalX = 0.0; // because r0 is now 0
}
final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
builder.floatPrecision = ShaderPrecision.kMedium;
builder.addIn(ShaderType.kVec4, name: 'v_color');
builder.addUniform(ShaderType.kVec2, name: 'u_resolution');
builder.addUniform(ShaderType.kVec2, name: 'u_tile_offset');
builder.addUniform(ShaderType.kFloat, name: 'u_radius');
builder.addUniform(ShaderType.kMat4, name: 'm_gradient');
final ShaderDeclaration fragColor = builder.fragmentColor;
final ShaderMethod method = builder.addMethod('main');
// Conical gradient.
method.addStatement('vec2 center = 0.5 * (u_resolution + u_tile_offset);');
method.addStatement(
'vec4 localCoord = m_gradient * vec4(gl_FragCoord.x - center.x, center.y - gl_FragCoord.y, 0, 1);');
method.addStatement('float dist = length(localCoord);');
final String f = (focalRadius /
(math.min(shaderBounds.width, shaderBounds.height) / 2.0))
.toStringAsPrecision(8);
method.addStatement(focalRadius == 0.0
? 'float st = dist / u_radius;'
: 'float st = ((dist / u_radius) - $f) / (1.0 - $f);');
if (tileMode == ui.TileMode.clamp) {
method.addStatement('if (st < 0.0) { st = -1.0; }');
}
final String probeName =
_writeSharedGradientShader(builder, method, gradient, tileMode);
method.addStatement('${fragColor.name} = $probeName * scale + bias;');
return builder.build();
}
}
/// Backend implementation of [ui.ImageFilter].
///
/// Currently only `blur` and `matrix` are supported.
abstract class EngineImageFilter implements ui.ImageFilter {
factory EngineImageFilter.blur({
required double sigmaX,
required double sigmaY,
required ui.TileMode tileMode,
}) = _BlurEngineImageFilter;
factory EngineImageFilter.matrix({
required Float64List matrix,
required ui.FilterQuality filterQuality,
}) = _MatrixEngineImageFilter;
EngineImageFilter._();
String get filterAttribute => '';
String get transformAttribute => '';
}
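// A minimal sketch of how these filters surface to the DOM (the `element`
// variable is hypothetical): the blur variant contributes a CSS `filter`
// value and the matrix variant contributes a CSS `transform` value.
//
//   final EngineImageFilter blur = EngineImageFilter.blur(
//     sigmaX: 2.0,
//     sigmaY: 2.0,
//     tileMode: ui.TileMode.clamp,
//   );
//   element.style.filter = blur.filterAttribute;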
class _BlurEngineImageFilter extends EngineImageFilter {
_BlurEngineImageFilter({ this.sigmaX = 0.0, this.sigmaY = 0.0, this.tileMode = ui.TileMode.clamp }) : super._();
final double sigmaX;
final double sigmaY;
final ui.TileMode tileMode;
// TODO(ferhat): implement TileMode.
@override
String get filterAttribute => blurSigmasToCssString(sigmaX, sigmaY);
@override
bool operator ==(Object other) {
if (other.runtimeType != runtimeType) {
return false;
}
return other is _BlurEngineImageFilter &&
other.tileMode == tileMode &&
other.sigmaX == sigmaX &&
other.sigmaY == sigmaY;
}
@override
int get hashCode => Object.hash(sigmaX, sigmaY, tileMode);
@override
String toString() {
return 'ImageFilter.blur($sigmaX, $sigmaY, ${tileModeString(tileMode)})';
}
}
class _MatrixEngineImageFilter extends EngineImageFilter {
_MatrixEngineImageFilter({ required Float64List matrix, required this.filterQuality })
: webMatrix = Float64List.fromList(matrix),
super._();
final Float64List webMatrix;
final ui.FilterQuality filterQuality;
// TODO(yjbanov): implement FilterQuality.
@override
String get transformAttribute => float64ListToCssTransform(webMatrix);
@override
bool operator ==(Object other) {
if (other.runtimeType != runtimeType) {
return false;
}
return other is _MatrixEngineImageFilter
&& other.filterQuality == filterQuality
&& listEquals<double>(other.webMatrix, webMatrix);
}
@override
int get hashCode => Object.hash(Object.hashAll(webMatrix), filterQuality);
@override
String toString() {
return 'ImageFilter.matrix($webMatrix, $filterQuality)';
}
}
/// The backend implementation of [ui.ColorFilter].
///
/// Currently only 'mode' and 'matrix' are supported.
abstract class EngineHtmlColorFilter implements EngineImageFilter {
EngineHtmlColorFilter();
String? filterId;
@override
String get filterAttribute => (filterId != null) ? 'url(#$filterId)' : '';
@override
String get transformAttribute => '';
/// Makes an [SvgFilter] and adds it as a global resource using [ResourceManager].
///
/// The [DomElement] from the created [SvgFilter] is returned so it can be
/// managed by the surface calling it.
DomElement? makeSvgFilter(DomElement? filterElement);
}
class ModeHtmlColorFilter extends EngineHtmlColorFilter {
ModeHtmlColorFilter(this.color, this.blendMode);
final ui.Color color;
ui.BlendMode blendMode;
@override
DomElement? makeSvgFilter(DomElement? filterElement) {
switch (blendMode) {
case ui.BlendMode.clear:
case ui.BlendMode.dstOut:
case ui.BlendMode.srcOut:
filterElement!.style.visibility = 'hidden';
return null;
case ui.BlendMode.dst:
case ui.BlendMode.dstIn:
// Noop.
return null;
case ui.BlendMode.src:
case ui.BlendMode.srcOver:
// Uses source filter color.
// Since we don't have a size, we can't use background color.
// Use svg filter srcIn instead.
blendMode = ui.BlendMode.srcIn;
case ui.BlendMode.dstOver:
case ui.BlendMode.srcIn:
case ui.BlendMode.srcATop:
case ui.BlendMode.dstATop:
case ui.BlendMode.xor:
case ui.BlendMode.plus:
case ui.BlendMode.modulate:
case ui.BlendMode.screen:
case ui.BlendMode.overlay:
case ui.BlendMode.darken:
case ui.BlendMode.lighten:
case ui.BlendMode.colorDodge:
case ui.BlendMode.colorBurn:
case ui.BlendMode.hardLight:
case ui.BlendMode.softLight:
case ui.BlendMode.difference:
case ui.BlendMode.exclusion:
case ui.BlendMode.multiply:
case ui.BlendMode.hue:
case ui.BlendMode.saturation:
case ui.BlendMode.color:
case ui.BlendMode.luminosity:
break;
}
final SvgFilter svgFilter = svgFilterFromBlendMode(color, blendMode);
ResourceManager.instance.addResource(svgFilter.element);
filterId = svgFilter.id;
if (blendMode == ui.BlendMode.saturation ||
blendMode == ui.BlendMode.multiply ||
blendMode == ui.BlendMode.modulate) {
filterElement!.style.backgroundColor = color.toCssString();
}
return svgFilter.element;
}
}
class MatrixHtmlColorFilter extends EngineHtmlColorFilter {
MatrixHtmlColorFilter(this.matrix);
final List<double> matrix;
@override
DomElement? makeSvgFilter(DomNode? filterElement) {
final SvgFilter svgFilter = svgFilterFromColorMatrix(matrix);
ResourceManager.instance.addResource(svgFilter.element);
filterId = svgFilter.id;
return svgFilter.element;
}
}
/// Converts the current [ColorFilter] to an [EngineHtmlColorFilter].
///
/// This workaround allows ColorFilter to be const constructible and
/// efficiently comparable, so that widgets can check for ColorFilter equality
/// to avoid repainting.
EngineHtmlColorFilter? createHtmlColorFilter(EngineColorFilter? colorFilter) {
if (colorFilter == null) {
return null;
}
switch (colorFilter.type) {
case ColorFilterType.mode:
if (colorFilter.color == null || colorFilter.blendMode == null) {
return null;
}
return ModeHtmlColorFilter(colorFilter.color!, colorFilter.blendMode!);
case ColorFilterType.matrix:
if (colorFilter.matrix == null) {
return null;
}
assert(colorFilter.matrix!.length == 20, 'Color Matrix must have 20 entries.');
return MatrixHtmlColorFilter(colorFilter.matrix!);
case ColorFilterType.linearToSrgbGamma:
throw UnimplementedError('ColorFilter.linearToSrgbGamma not implemented for HTML renderer');
case ColorFilterType.srgbToLinearGamma:
throw UnimplementedError('ColorFilter.srgbToLinearGamma not implemented for HTML renderer.');
default:
throw StateError('Unknown mode ${colorFilter.type} for ColorFilter.');
}
}
| engine/lib/web_ui/lib/src/engine/html/shaders/shader.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/shaders/shader.dart",
"repo_id": "engine",
"token_count": 12738
} | 294 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../dom.dart';
import 'prevent_default.dart';
/// Controls the browser's context menu in the given [element].
class ContextMenu {
ContextMenu(this.element);
final DomElement element;
/// False when the context menu has been disabled, otherwise true.
bool _enabled = true;
/// Disables the browser's context menu for this [element].
///
/// By default, when a Flutter web app starts, the context menu is enabled.
///
/// Can be re-enabled by calling [enable].
void disable() {
if (!_enabled) {
return;
}
_enabled = false;
element.addEventListener('contextmenu', preventDefaultListener);
}
/// Enables the browser's context menu for this [element].
///
/// By default, when a Flutter web app starts, the context menu is already
/// enabled. Typically, this method would be used after calling
/// [disable] to first disable it.
void enable() {
if (_enabled) {
return;
}
_enabled = true;
element.removeEventListener('contextmenu', preventDefaultListener);
}
}
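// A usage sketch (the `rootElement` variable below is hypothetical): disable
// the browser's context menu on the app's root element, for example so that
// right-click can be handled by the framework, and re-enable it later.
//
//   final ContextMenu contextMenu = ContextMenu(rootElement);
//   contextMenu.disable();
//   // ... later, to restore the default browser behavior:
//   contextMenu.enable();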
| engine/lib/web_ui/lib/src/engine/mouse/context_menu.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/mouse/context_menu.dart",
"repo_id": "engine",
"token_count": 354
} | 295 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:js_interop';
import 'dart:math' as math;
import 'package:meta/meta.dart';
import 'package:ui/src/engine/keyboard_binding.dart';
import 'package:ui/ui.dart' as ui;
import '../engine.dart' show registerHotRestartListener;
import 'browser_detection.dart';
import 'dom.dart';
import 'platform_dispatcher.dart';
import 'pointer_binding/event_position_helper.dart';
import 'pointer_converter.dart';
import 'safe_browser_api.dart';
import 'semantics.dart';
import 'window.dart';
/// Set this flag to true to log all the browser events.
const bool _debugLogPointerEvents = false;
/// Set this to true to log all the events sent to the Flutter framework.
const bool _debugLogFlutterEvents = false;
/// The signature of a callback that handles pointer events.
typedef _PointerDataCallback = void Function(DomEvent event, List<ui.PointerData>);
// The mask for the bitfield of event buttons. Buttons not contained in this
// mask are cut off.
//
// In Flutter we used `kMaxUnsignedSMI`, but since that value is not available
// here, we use an already very large number (30 bits).
const int _kButtonsMask = 0x3FFFFFFF;
// Intentionally set to -1 or -2 so it doesn't conflict with other device IDs.
const int _mouseDeviceId = -1;
const int _trackpadDeviceId = -2;
const int _kPrimaryMouseButton = 0x1;
const int _kSecondaryMouseButton = 0x2;
const int _kMiddleMouseButton = 0x4;
int _nthButton(int n) => 0x1 << n;
/// Convert the `button` property of PointerEvent or MouseEvent to a bit mask of
/// its `buttons` property.
///
/// The `button` property is an integer describing the button changed in an event,
/// which is sequentially 0 for LMB, 1 for MMB, 2 for RMB, 3 for backward and
/// 4 for forward, etc.
///
/// The `buttons` property is a bitfield describing the buttons pressed after an
/// event, which is 0x1 for LMB, 0x4 for MMB, 0x2 for RMB, 0x8 for backward
/// and 0x10 for forward, etc.
@visibleForTesting
int convertButtonToButtons(int button) {
assert(button >= 0, 'Unexpected negative button $button.');
switch (button) {
case 0:
return _kPrimaryMouseButton;
case 1:
return _kMiddleMouseButton;
case 2:
return _kSecondaryMouseButton;
default:
return _nthButton(button);
}
}
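// Worked examples of the mapping above: button 0 (LMB) becomes 0x1, button 1
// (MMB) becomes 0x4, button 2 (RMB) becomes 0x2, and button 3 (back) falls
// through to _nthButton(3) == 0x8.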
/// Wraps a workaround for Safari on iOS that adds a dummy event listener.
///
/// More info about the issue and workaround: https://github.com/flutter/flutter/issues/70858
class SafariPointerEventWorkaround {
SafariPointerEventWorkaround._();
DomEventListener? _listener;
void workAroundMissingPointerEvents() {
// We only need to attach the listener once.
if (_listener == null) {
_listener = createDomEventListener((_) {});
domDocument.addEventListener('touchstart', _listener);
}
}
void dispose() {
if (_listener != null) {
domDocument.removeEventListener('touchstart', _listener);
_listener = null;
}
}
}
class PointerBinding {
PointerBinding(
this.view, {
PointerSupportDetector detector = const PointerSupportDetector(),
SafariPointerEventWorkaround? safariWorkaround,
}) : _pointerDataConverter = PointerDataConverter(),
_detector = detector {
if (isIosSafari) {
_safariWorkaround = safariWorkaround ?? _defaultSafariWorkaround;
_safariWorkaround!.workAroundMissingPointerEvents();
}
_adapter = _createAdapter();
assert(() {
registerHotRestartListener(dispose);
return true;
}());
}
static final SafariPointerEventWorkaround _defaultSafariWorkaround = SafariPointerEventWorkaround._();
static final ClickDebouncer clickDebouncer = ClickDebouncer();
/// Resets global pointer state that's not tied to any single [PointerBinding]
/// instance.
@visibleForTesting
static void debugResetGlobalState() {
clickDebouncer.reset();
PointerDataConverter.globalPointerState.reset();
}
SafariPointerEventWorkaround? _safariWorkaround;
/// Performs necessary clean up for PointerBinding including removing event listeners
/// and clearing the existing pointer state
void dispose() {
_adapter.dispose();
_safariWorkaround?.dispose();
}
final EngineFlutterView view;
DomElement get rootElement => view.dom.rootElement;
final PointerSupportDetector _detector;
final PointerDataConverter _pointerDataConverter;
KeyboardConverter? _keyboardConverter = KeyboardBinding.instance?.converter;
late _BaseAdapter _adapter;
@visibleForTesting
void debugOverrideKeyboardConverter(KeyboardConverter? keyboardConverter) {
_keyboardConverter = keyboardConverter;
}
_BaseAdapter _createAdapter() {
if (_detector.hasPointerEvents) {
return _PointerAdapter(this);
}
throw UnsupportedError(
'This browser does not support pointer events which '
'are necessary to handle interactions with Flutter Web apps.',
);
}
}
@visibleForTesting
typedef QueuedEvent = ({ DomEvent event, Duration timeStamp, List<ui.PointerData> data });
@visibleForTesting
typedef DebounceState = ({
DomElement target,
Timer timer,
List<QueuedEvent> queue,
});
/// Disambiguates taps and clicks that are produced both by the framework from
/// `pointerdown`/`pointerup` events and those detected as DOM "click" events by
/// the browser.
///
/// The implementation is waiting for a `pointerdown`, and as soon as it sees
/// one stops forwarding pointer events to the framework, and instead queues
/// them in a list. The queuing process stops as soon as one of the following
/// two conditions happens first:
///
/// * 200ms passes after the `pointerdown` event. Most clicks, even slow ones,
/// are typically done by then. Importantly, screen readers simulate clicks
/// much faster than 200ms. So if the timer expires, it is likely the user is
/// not interested in producing a click, so the debouncing process stops and
/// all queued events are forwarded to the framework. If, for example, a
/// tappable node is inside a scrollable viewport, the events can be
///   interpreted by the framework to initiate scrolling.
/// * A `click` event arrives. If the event queue has not been flushed to the
/// framework, the event is forwarded to the framework as a
/// `SemanticsAction.tap`, and all the pointer events are dropped. If, by the
/// time the click event arrives, the queue was flushed (but no more than 50ms
/// ago), then the click event is dropped instead under the assumption that
/// the flushed pointer events are interpreted by the framework as the desired
/// gesture.
///
/// This mechanism is in place to deal with https://github.com/flutter/flutter/issues/130162.
class ClickDebouncer {
ClickDebouncer() {
assert(() {
registerHotRestartListener(reset);
return true;
}());
}
DebounceState? _state;
@visibleForTesting
DebounceState? get debugState => _state;
// The timestamp of the last "pointerup" DOM event that was flushed.
//
// Not to be confused with the time when it was flushed. The two may be far
// apart because the flushing can happen after a delay due to timer, or events
// that happen after the said "pointerup".
Duration? _lastFlushedPointerUpTimeStamp;
/// Returns true if the debouncer has a non-empty queue of pointer events that
/// were withheld from the framework.
///
/// This value is normally false, and it flips to true when the first
/// pointerdown is observed that lands on a tappable semantics node, denoted
/// by the presence of the `flt-tappable` attribute.
bool get isDebouncing => _state != null;
/// Processes a pointer event.
///
/// If semantics are off, simply forwards the event to the framework.
///
/// If currently debouncing events (see [isDebouncing]), adds the event to
/// the debounce queue, unless the target of the event is different from the
/// target that initiated the debouncing process, in which case stops
/// debouncing and flushes pointer events to the framework.
///
/// If the event is a `pointerdown` and the target is `flt-tappable`, begins
/// debouncing events.
///
/// In all other situations forwards the event to the framework.
void onPointerData(DomEvent event, List<ui.PointerData> data) {
if (!EnginePlatformDispatcher.instance.semanticsEnabled) {
_sendToFramework(event, data);
return;
}
if (isDebouncing) {
_debounce(event, data);
} else if (event.type == 'pointerdown') {
_startDebouncing(event, data);
} else {
_sendToFramework(event, data);
}
}
/// Notifies the debouncer of the browser-detected "click" DOM event.
///
/// Forwards the event to the framework, unless it is deduplicated because
/// the corresponding pointer down/up events were recently flushed to the
/// framework already.
void onClick(DomEvent click, int semanticsNodeId, bool isListening) {
assert(click.type == 'click');
if (!isDebouncing) {
// There's no pending queue of pointer events that are being debounced. It
// is a standalone click event. Unless pointer down/up were flushed
// recently and if the node is currently listening to event, forward to
// the framework.
if (isListening && _shouldSendClickEventToFramework(click)) {
_sendSemanticsTapToFramework(click, semanticsNodeId);
}
return;
}
if (isListening) {
// There's a pending queue of pointer events. Prefer sending the tap action
// instead of pointer events, because the pointer events may not land on the
// combined semantic node and miss the click/tap.
final DebounceState state = _state!;
_state = null;
state.timer.cancel();
_sendSemanticsTapToFramework(click, semanticsNodeId);
} else {
// The semantic node is not listening to taps. Flush the pointer events
// for the framework to figure out what to do with them. It's possible
// the framework is interested in gestures other than taps.
_flush();
}
}
void _sendSemanticsTapToFramework(DomEvent click, int semanticsNodeId) {
// Tappable nodes can be nested inside other tappable nodes. If a click
// lands on an inner element and is allowed to propagate, it will also
// land on the ancestor tappable, leading to both the descendant and the
// ancestor sending SemanticsAction.tap to the framework, creating a double
// tap/click, which is wrong. More details:
//
// https://github.com/flutter/flutter/issues/134842
click.stopPropagation();
EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
semanticsNodeId, ui.SemanticsAction.tap, null);
}
void _startDebouncing(DomEvent event, List<ui.PointerData> data) {
assert(
_state == null,
'Cannot start debouncing. Already debouncing.'
);
assert(
event.type == 'pointerdown',
'Click debouncing must begin with a pointerdown'
);
final DomEventTarget? target = event.target;
if (target is DomElement && target.hasAttribute('flt-tappable')) {
_state = (
target: target,
// The 200ms duration was chosen empirically by testing tapping, mouse
// clicking, trackpad tapping and clicking, as well as the following
// screen readers: TalkBack on Android, VoiceOver on macOS, Narrator/
// NVDA/JAWS on Windows. 200ms seemed to hit the sweet spot by
// satisfying the following:
// * It was short enough that delaying the `pointerdown` still allowed
// drag gestures to begin reasonably soon (e.g. scrolling).
// * It was long enough to register taps and clicks.
// * It was successful at detecting taps generated by all tested
// screen readers.
timer: Timer(const Duration(milliseconds: 200), _onTimerExpired),
queue: <QueuedEvent>[(
event: event,
timeStamp: _BaseAdapter._eventTimeStampToDuration(event.timeStamp!),
data: data,
)],
);
} else {
// The event landed on an non-tappable target. Assume this won't lead to
// double clicks and forward the event to the framework.
_sendToFramework(event, data);
}
}
void _debounce(DomEvent event, List<ui.PointerData> data) {
assert(
_state != null,
'Cannot debounce event. Debouncing state not established by _startDebouncing.'
);
final DebounceState state = _state!;
state.queue.add((
event: event,
timeStamp: _BaseAdapter._eventTimeStampToDuration(event.timeStamp!),
data: data,
));
// It's only interesting to debounce clicks when both `pointerdown` and
// `pointerup` land on the same element.
if (event.type == 'pointerup') {
// TODO(yjbanov): this is a bit mouthful, but see https://github.com/dart-lang/sdk/issues/53070
final DomEventTarget? eventTarget = event.target;
final DomElement stateTarget = state.target;
final bool targetChanged = eventTarget != stateTarget;
if (targetChanged) {
_flush();
}
}
}
void _onTimerExpired() {
if (!isDebouncing) {
return;
}
_flush();
}
// If the click event happens soon after the last `pointerup` event that was
// already flushed to the framework, the click event is dropped to avoid
// double click.
bool _shouldSendClickEventToFramework(DomEvent click) {
final Duration? lastFlushedPointerUpTimeStamp = _lastFlushedPointerUpTimeStamp;
if (lastFlushedPointerUpTimeStamp == null) {
// We haven't seen a pointerup. It's standalone click event. Let it through.
return true;
}
final Duration clickTimeStamp = _BaseAdapter._eventTimeStampToDuration(click.timeStamp!);
final Duration delta = clickTimeStamp - lastFlushedPointerUpTimeStamp;
return delta >= const Duration(milliseconds: 50);
}
void _flush() {
assert(_state != null);
final DebounceState state = _state!;
state.timer.cancel();
final List<ui.PointerData> aggregateData = <ui.PointerData>[];
for (final QueuedEvent queuedEvent in state.queue) {
if (queuedEvent.event.type == 'pointerup') {
_lastFlushedPointerUpTimeStamp = queuedEvent.timeStamp;
}
aggregateData.addAll(queuedEvent.data);
}
_sendToFramework(null, aggregateData);
_state = null;
}
void _sendToFramework(DomEvent? event, List<ui.PointerData> data) {
final ui.PointerDataPacket packet = ui.PointerDataPacket(data: data.toList());
if (_debugLogFlutterEvents) {
for(final ui.PointerData datum in data) {
print('fw:${datum.change} ${datum.physicalX},${datum.physicalY}');
}
}
EnginePlatformDispatcher.instance.invokeOnPointerDataPacket(packet);
}
/// Cancels any pending debounce process and forgets anything that happened so
/// far.
///
/// This object can be used as if it was just initialized.
void reset() {
_state?.timer.cancel();
_state = null;
_lastFlushedPointerUpTimeStamp = null;
}
}
class PointerSupportDetector {
const PointerSupportDetector();
bool get hasPointerEvents => hasJsProperty(domWindow, 'PointerEvent');
@override
String toString() => 'pointers:$hasPointerEvents';
}
class _Listener {
_Listener._({
required this.event,
required this.target,
required this.handler,
});
/// Registers a listener for the given `event` on a `target`.
///
/// If `passive` is null uses the default behavior determined by the event
/// type. If `passive` is true, marks the handler as non-blocking for the
/// built-in browser behavior. This means the browser will not wait for the
/// handler to finish execution before performing the default action
/// associated with this event. If `passive` is false, the browser will wait
/// for the handler to finish execution before performing the respective
/// action.
factory _Listener.register({
required String event,
required DomEventTarget target,
required DartDomEventListener handler,
bool? passive,
}) {
final DomEventListener jsHandler = createDomEventListener(handler);
if (passive == null) {
target.addEventListener(event, jsHandler);
} else {
final Map<String, Object> eventOptions = <String, Object>{
'passive': passive,
};
target.addEventListenerWithOptions(event, jsHandler, eventOptions);
}
final _Listener listener = _Listener._(
event: event,
target: target,
handler: jsHandler,
);
return listener;
}
final String event;
final DomEventTarget target;
final DomEventListener handler;
void unregister() {
target.removeEventListener(event, handler);
}
}
/// Common functionality that's shared among adapters.
abstract class _BaseAdapter {
_BaseAdapter(this._owner) {
setup();
}
final PointerBinding _owner;
EngineFlutterView get _view => _owner.view;
_PointerDataCallback get _callback => PointerBinding.clickDebouncer.onPointerData;
PointerDataConverter get _pointerDataConverter => _owner._pointerDataConverter;
KeyboardConverter? get _keyboardConverter => _owner._keyboardConverter;
final List<_Listener> _listeners = <_Listener>[];
DomWheelEvent? _lastWheelEvent;
bool _lastWheelEventWasTrackpad = false;
DomEventTarget get _viewTarget => _view.dom.rootElement;
DomEventTarget get _globalTarget => _view.embeddingStrategy.globalEventTarget;
/// Each subclass is expected to override this method to attach its own event
/// listeners and convert events into pointer events.
void setup();
/// Cleans up all event listeners attached by this adapter.
void dispose() {
for (final _Listener listener in _listeners) {
listener.unregister();
}
_listeners.clear();
}
/// Adds a listener for the given [eventName] to [target].
///
/// Generally speaking, down and leave events should use [_rootElement]
/// as the [target], while move and up events should use [domWindow]
/// instead, because the browser doesn't fire the latter two for DOM elements
/// when the pointer is outside the window.
void addEventListener(
DomEventTarget target,
String eventName,
DartDomEventListener handler,
) {
JSVoid loggedHandler(DomEvent event) {
if (_debugLogPointerEvents) {
if (domInstanceOfString(event, 'PointerEvent')) {
final DomPointerEvent pointerEvent = event as DomPointerEvent;
final ui.Offset offset = computeEventOffsetToTarget(event, _view);
print('${pointerEvent.type} '
'${offset.dx.toStringAsFixed(1)},'
'${offset.dy.toStringAsFixed(1)}');
} else {
print(event.type);
}
}
// Report the event to semantics. This information is used to debounce
// browser gestures. Semantics tells us whether it is safe to forward
// the event to the framework.
if (EngineSemantics.instance.receiveGlobalEvent(event)) {
handler(event);
}
}
_listeners.add(_Listener.register(
event: eventName,
target: target,
handler: loggedHandler,
));
}
/// Converts a floating number timestamp (in milliseconds) to a [Duration] by
/// splitting it into two integer components: milliseconds + microseconds.
static Duration _eventTimeStampToDuration(num milliseconds) {
final int ms = milliseconds.toInt();
final int micro =
((milliseconds - ms) * Duration.microsecondsPerMillisecond).toInt();
return Duration(milliseconds: ms, microseconds: micro);
}
}
mixin _WheelEventListenerMixin on _BaseAdapter {
static double? _defaultScrollLineHeight;
bool _isAcceleratedMouseWheelDelta(num delta, num? wheelDelta) {
// On macOS, scrolling using a mouse wheel by default uses an acceleration
// curve, so delta values ramp up and are not at fixed multiples of 120.
// But in this case, the wheelDelta properties of the event still keep
// their original values.
// For all events without this acceleration curve applied, the wheelDelta
// values are by convention three times greater than the delta values and with
// the opposite sign.
if (wheelDelta == null) {
return false;
}
// Account for observed issues with integer truncation by allowing +-1px error.
return (wheelDelta - (-3 * delta)).abs() > 1;
}
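// For example, a non-accelerated wheel tick with delta = 100 comes with
// wheelDelta = -300, so the expression above is 0 and this returns false.
// An accelerated tick such as delta = 40.5 with wheelDelta = -120 differs
// by 1.5 and returns true. (The specific values are illustrative.)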
bool _isTrackpadEvent(DomWheelEvent event) {
// This function relies on deprecated and non-standard implementation
// details. Useful reference material can be found below.
//
// https://source.chromium.org/chromium/chromium/src/+/main:ui/events/event.cc
// https://source.chromium.org/chromium/chromium/src/+/main:ui/events/cocoa/events_mac.mm
// https://github.com/WebKit/WebKit/blob/main/Source/WebCore/platform/mac/PlatformEventFactoryMac.mm
// https://searchfox.org/mozilla-central/source/dom/events/WheelEvent.h
// https://learn.microsoft.com/en-us/windows/win32/inputdev/wm-mousewheel
if (browserEngine == BrowserEngine.firefox) {
// Firefox has restricted the wheelDelta properties, they do not provide
// enough information to accurately disambiguate trackpad events from mouse
// wheel events.
return false;
}
if (_isAcceleratedMouseWheelDelta(event.deltaX, event.wheelDeltaX) ||
_isAcceleratedMouseWheelDelta(event.deltaY, event.wheelDeltaY)) {
return false;
}
if (((event.deltaX % 120 == 0) && (event.deltaY % 120 == 0)) ||
(((event.wheelDeltaX ?? 1) % 120 == 0) && ((event.wheelDeltaY ?? 1) % 120) == 0)) {
// While not in any formal web standard, `blink` and `webkit` browsers use
// a delta of 120 to represent one mouse wheel turn. If both dimensions of
// the delta are divisible by 120, this event is probably from a mouse.
// Checking if wheelDeltaX and wheelDeltaY are both divisible by 120
// catches any macOS accelerated mouse wheel deltas which by random chance
// are not caught by _isAcceleratedMouseWheelDelta.
final num deltaXChange = (event.deltaX - (_lastWheelEvent?.deltaX ?? 0)).abs();
final num deltaYChange = (event.deltaY - (_lastWheelEvent?.deltaY ?? 0)).abs();
if ((_lastWheelEvent == null) ||
(deltaXChange == 0 && deltaYChange == 0) ||
!(deltaXChange < 20 && deltaYChange < 20)) {
// A trackpad event might by chance have a delta of exactly 120, so
// make sure this event does not have a similar delta to the previous
// one before calling it a mouse event.
if (event.timeStamp != null && _lastWheelEvent?.timeStamp != null) {
// If the event has a large delta to the previous event, check if
// it was preceded within 50 milliseconds by a trackpad event. This
// handles unlucky 120-delta trackpad events during rapid movement.
final num diffMs = event.timeStamp! - _lastWheelEvent!.timeStamp!;
if (diffMs < 50 && _lastWheelEventWasTrackpad) {
return true;
}
}
return false;
}
}
return true;
}
List<ui.PointerData> _convertWheelEventToPointerData(
DomWheelEvent event
) {
const int domDeltaPixel = 0x00;
const int domDeltaLine = 0x01;
const int domDeltaPage = 0x02;
ui.PointerDeviceKind kind = ui.PointerDeviceKind.mouse;
int deviceId = _mouseDeviceId;
if (_isTrackpadEvent(event)) {
kind = ui.PointerDeviceKind.trackpad;
deviceId = _trackpadDeviceId;
}
// Flutter only supports pixel scroll delta. Convert deltaMode values
// to pixels.
double deltaX = event.deltaX;
double deltaY = event.deltaY;
switch (event.deltaMode.toInt()) {
case domDeltaLine:
_defaultScrollLineHeight ??= _computeDefaultScrollLineHeight();
deltaX *= _defaultScrollLineHeight!;
deltaY *= _defaultScrollLineHeight!;
case domDeltaPage:
deltaX *= _view.physicalSize.width;
deltaY *= _view.physicalSize.height;
case domDeltaPixel:
if (operatingSystem == OperatingSystem.macOs) {
// Safari and Firefox seem to report delta in logical pixels while
// Chrome uses physical pixels.
deltaX *= _view.devicePixelRatio;
deltaY *= _view.devicePixelRatio;
}
default:
break;
}
final List<ui.PointerData> data = <ui.PointerData>[];
final ui.Offset offset = computeEventOffsetToTarget(event, _view);
bool ignoreCtrlKey = false;
if (operatingSystem == OperatingSystem.macOs) {
ignoreCtrlKey = (_keyboardConverter?.keyIsPressed(kPhysicalControlLeft) ?? false) ||
(_keyboardConverter?.keyIsPressed(kPhysicalControlRight) ?? false);
}
if (event.ctrlKey && !ignoreCtrlKey) {
_pointerDataConverter.convert(
data,
viewId: _view.viewId,
change: ui.PointerChange.hover,
timeStamp: _BaseAdapter._eventTimeStampToDuration(event.timeStamp!),
kind: kind,
signalKind: ui.PointerSignalKind.scale,
device: deviceId,
physicalX: offset.dx * _view.devicePixelRatio,
physicalY: offset.dy * _view.devicePixelRatio,
buttons: event.buttons!.toInt(),
pressure: 1.0,
pressureMax: 1.0,
scale: math.exp(-deltaY / 200),
);
} else {
_pointerDataConverter.convert(
data,
viewId: _view.viewId,
change: ui.PointerChange.hover,
timeStamp: _BaseAdapter._eventTimeStampToDuration(event.timeStamp!),
kind: kind,
signalKind: ui.PointerSignalKind.scroll,
device: deviceId,
physicalX: offset.dx * _view.devicePixelRatio,
physicalY: offset.dy * _view.devicePixelRatio,
buttons: event.buttons!.toInt(),
pressure: 1.0,
pressureMax: 1.0,
scrollDeltaX: deltaX,
scrollDeltaY: deltaY,
);
}
_lastWheelEvent = event;
_lastWheelEventWasTrackpad = kind == ui.PointerDeviceKind.trackpad;
return data;
}
void _addWheelEventListener(DartDomEventListener handler) {
_listeners.add(_Listener.register(
event: 'wheel',
target: _viewTarget,
handler: handler,
passive: false,
));
}
void _handleWheelEvent(DomEvent e) {
assert(domInstanceOfString(e, 'WheelEvent'));
final DomWheelEvent event = e as DomWheelEvent;
if (_debugLogPointerEvents) {
print(event.type);
}
_callback(e, _convertWheelEventToPointerData(event));
// Prevent default so the mouse wheel event doesn't get converted to
// a scroll event that semantic nodes would process.
event.preventDefault();
}
/// For browsers that report scroll deltas in lines instead of pixels, such as
/// Firefox, computes the line height using the default font size.
///
/// Use Firefox to test this code path.
double _computeDefaultScrollLineHeight() {
const double kFallbackFontHeight = 16.0;
final DomHTMLDivElement probe = createDomHTMLDivElement();
probe.style
..fontSize = 'initial'
..display = 'none';
domDocument.body!.append(probe);
String fontSize = domWindow.getComputedStyle(probe).fontSize;
double? res;
if (fontSize.contains('px')) {
fontSize = fontSize.replaceAll('px', '');
res = double.tryParse(fontSize);
}
probe.remove();
return res == null ? kFallbackFontHeight : res / 4.0;
}
}
@immutable
class _SanitizedDetails {
const _SanitizedDetails({
required this.buttons,
required this.change,
});
final ui.PointerChange change;
final int buttons;
@override
String toString() => '$runtimeType(change: $change, buttons: $buttons)';
}
class _ButtonSanitizer {
int _pressedButtons = 0;
/// Transform [DomPointerEvent.buttons] to Flutter's PointerEvent buttons.
int _htmlButtonsToFlutterButtons(int buttons) {
// Flutter's button definition conveniently matches that of JavaScript
// from primary button (0x1) to forward button (0x10), which allows us to
// avoid transforming it bit by bit.
return buttons & _kButtonsMask;
}
/// Given [DomPointerEvent.button] and [DomPointerEvent.buttons], tries to
/// infer the correct value for Flutter buttons.
int _inferDownFlutterButtons(int button, int buttons) {
if (buttons == 0 && button > -1) {
// In some cases, the browser sends `buttons:0` in a down event. In such
// case, we try to infer the value from `button`.
buttons = convertButtonToButtons(button);
}
return _htmlButtonsToFlutterButtons(buttons);
}
_SanitizedDetails sanitizeDownEvent({
required int button,
required int buttons,
}) {
// If the pointer is already down, we just send a move event with the new
// `buttons` value.
if (_pressedButtons != 0) {
return sanitizeMoveEvent(buttons: buttons);
}
_pressedButtons = _inferDownFlutterButtons(button, buttons);
return _SanitizedDetails(
change: ui.PointerChange.down,
buttons: _pressedButtons,
);
}
_SanitizedDetails sanitizeMoveEvent({required int buttons}) {
final int newPressedButtons = _htmlButtonsToFlutterButtons(buttons);
// This could happen when the user clicks RMB then moves the mouse quickly.
// The browser sends a move event with `buttons:2` even though there are no
// buttons down yet.
if (_pressedButtons == 0 && newPressedButtons != 0) {
return _SanitizedDetails(
change: ui.PointerChange.hover,
buttons: _pressedButtons,
);
}
_pressedButtons = newPressedButtons;
return _SanitizedDetails(
change: _pressedButtons == 0
? ui.PointerChange.hover
: ui.PointerChange.move,
buttons: _pressedButtons,
);
}
_SanitizedDetails? sanitizeMissingRightClickUp({required int buttons}) {
final int newPressedButtons = _htmlButtonsToFlutterButtons(buttons);
// This could happen when RMB is clicked and released but no pointerup
// event was received because context menu was shown.
if (_pressedButtons != 0 && newPressedButtons == 0) {
_pressedButtons = 0;
return _SanitizedDetails(
change: ui.PointerChange.up,
buttons: _pressedButtons,
);
}
return null;
}
_SanitizedDetails? sanitizeLeaveEvent({required int buttons}) {
final int newPressedButtons = _htmlButtonsToFlutterButtons(buttons);
// The move event already handles the case where the pointer is currently
// down, in which case handling the leave event as well is superfluous.
if (newPressedButtons == 0) {
_pressedButtons = 0;
return _SanitizedDetails(
change: ui.PointerChange.hover,
buttons: _pressedButtons,
);
}
return null;
}
_SanitizedDetails? sanitizeUpEvent({required int? buttons}) {
// The pointer could have been released by a `pointerout` event, in which
// case `pointerup` should have no effect.
if (_pressedButtons == 0) {
return null;
}
_pressedButtons = _htmlButtonsToFlutterButtons(buttons ?? 0);
if (_pressedButtons == 0) {
// All buttons have been released.
return _SanitizedDetails(
change: ui.PointerChange.up,
buttons: _pressedButtons,
);
} else {
// There are still some unreleased buttons, we shouldn't send an up event
// yet. Instead we send a move event to update the position of the pointer.
return _SanitizedDetails(
change: ui.PointerChange.move,
buttons: _pressedButtons,
);
}
}
_SanitizedDetails sanitizeCancelEvent() {
_pressedButtons = 0;
return _SanitizedDetails(
change: ui.PointerChange.cancel,
buttons: _pressedButtons,
);
}
}
typedef _PointerEventListener = dynamic Function(DomPointerEvent event);
/// Adapter class to be used with browsers that support native pointer events.
///
/// For the difference between MouseEvent and PointerEvent, see _MouseAdapter.
class _PointerAdapter extends _BaseAdapter with _WheelEventListenerMixin {
_PointerAdapter(super.owner);
final Map<int, _ButtonSanitizer> _sanitizers = <int, _ButtonSanitizer>{};
@visibleForTesting
Iterable<int> debugTrackedDevices() => _sanitizers.keys;
_ButtonSanitizer _ensureSanitizer(int device) {
return _sanitizers.putIfAbsent(device, () => _ButtonSanitizer());
}
_ButtonSanitizer _getSanitizer(int device) {
assert(_sanitizers[device] != null);
return _sanitizers[device]!;
}
bool _hasSanitizer(int device) {
return _sanitizers.containsKey(device);
}
void _removePointerIfUnhoverable(DomPointerEvent event) {
if (event.pointerType == 'touch') {
_sanitizers.remove(event.pointerId);
}
}
void _addPointerEventListener(
DomEventTarget target,
String eventName,
_PointerEventListener handler, {
bool checkModifiers = true,
}) {
addEventListener(target, eventName, (DomEvent event) {
final DomPointerEvent pointerEvent = event as DomPointerEvent;
if (checkModifiers) {
_checkModifiersState(event);
}
handler(pointerEvent);
});
}
void _checkModifiersState(DomPointerEvent event) {
_keyboardConverter?.synthesizeModifiersIfNeeded(
event.getModifierState('Alt'),
event.getModifierState('Control'),
event.getModifierState('Meta'),
event.getModifierState('Shift'),
event.timeStamp!,
);
}
@override
void setup() {
_addPointerEventListener(_viewTarget, 'pointerdown', (DomPointerEvent event) {
final int device = _getPointerId(event);
final List<ui.PointerData> pointerData = <ui.PointerData>[];
final _ButtonSanitizer sanitizer = _ensureSanitizer(device);
final _SanitizedDetails? up =
sanitizer.sanitizeMissingRightClickUp(buttons: event.buttons!.toInt());
if (up != null) {
_convertEventsToPointerData(data: pointerData, event: event, details: up);
}
final _SanitizedDetails down =
sanitizer.sanitizeDownEvent(
button: event.button.toInt(),
buttons: event.buttons!.toInt(),
);
_convertEventsToPointerData(data: pointerData, event: event, details: down);
_callback(event, pointerData);
});
// Why `domWindow` you ask? See this fiddle: https://jsfiddle.net/ditman/7towxaqp
_addPointerEventListener(_globalTarget, 'pointermove', (DomPointerEvent event) {
final int device = _getPointerId(event);
final _ButtonSanitizer sanitizer = _ensureSanitizer(device);
final List<ui.PointerData> pointerData = <ui.PointerData>[];
final List<DomPointerEvent> expandedEvents = _expandEvents(event);
for (final DomPointerEvent event in expandedEvents) {
final _SanitizedDetails? up = sanitizer.sanitizeMissingRightClickUp(buttons: event.buttons!.toInt());
if (up != null) {
_convertEventsToPointerData(data: pointerData, event: event, details: up);
}
final _SanitizedDetails move = sanitizer.sanitizeMoveEvent(buttons: event.buttons!.toInt());
_convertEventsToPointerData(data: pointerData, event: event, details: move);
}
_callback(event, pointerData);
});
_addPointerEventListener(_viewTarget, 'pointerleave', (DomPointerEvent event) {
final int device = _getPointerId(event);
final _ButtonSanitizer sanitizer = _ensureSanitizer(device);
final List<ui.PointerData> pointerData = <ui.PointerData>[];
final _SanitizedDetails? details = sanitizer.sanitizeLeaveEvent(buttons: event.buttons!.toInt());
if (details != null) {
_convertEventsToPointerData(data: pointerData, event: event, details: details);
_callback(event, pointerData);
}
}, checkModifiers: false);
// TODO(dit): This must happen in the flutterViewElement, https://github.com/flutter/flutter/issues/116561
_addPointerEventListener(_globalTarget, 'pointerup', (DomPointerEvent event) {
final int device = _getPointerId(event);
if (_hasSanitizer(device)) {
final List<ui.PointerData> pointerData = <ui.PointerData>[];
final _SanitizedDetails? details = _getSanitizer(device).sanitizeUpEvent(buttons: event.buttons?.toInt());
_removePointerIfUnhoverable(event);
if (details != null) {
_convertEventsToPointerData(data: pointerData, event: event, details: details);
_callback(event, pointerData);
}
}
});
// TODO(dit): Synthesize a "cancel" event when 'pointerup' happens outside of the flutterViewElement, https://github.com/flutter/flutter/issues/116561
// A browser fires cancel event if it concludes the pointer will no longer
// be able to generate events (example: device is deactivated)
_addPointerEventListener(_viewTarget, 'pointercancel', (DomPointerEvent event) {
final int device = _getPointerId(event);
if (_hasSanitizer(device)) {
final List<ui.PointerData> pointerData = <ui.PointerData>[];
final _SanitizedDetails details = _getSanitizer(device).sanitizeCancelEvent();
_removePointerIfUnhoverable(event);
_convertEventsToPointerData(data: pointerData, event: event, details: details);
_callback(event, pointerData);
}
}, checkModifiers: false);
_addWheelEventListener((DomEvent event) {
_handleWheelEvent(event);
});
}
// For each event that is de-coalesced from `event` and described in
// `details`, convert it to pointer data and store in `data`.
void _convertEventsToPointerData({
required List<ui.PointerData> data,
required DomPointerEvent event,
required _SanitizedDetails details,
}) {
final ui.PointerDeviceKind kind = _pointerTypeToDeviceKind(event.pointerType!);
final double tilt = _computeHighestTilt(event);
final Duration timeStamp = _BaseAdapter._eventTimeStampToDuration(event.timeStamp!);
final num? pressure = event.pressure;
final ui.Offset offset = computeEventOffsetToTarget(event, _view);
_pointerDataConverter.convert(
data,
viewId: _view.viewId,
change: details.change,
timeStamp: timeStamp,
kind: kind,
signalKind: ui.PointerSignalKind.none,
device: _getPointerId(event),
physicalX: offset.dx * _view.devicePixelRatio,
physicalY: offset.dy * _view.devicePixelRatio,
buttons: details.buttons,
pressure: pressure == null ? 0.0 : pressure.toDouble(),
pressureMax: 1.0,
tilt: tilt,
);
}
List<DomPointerEvent> _expandEvents(DomPointerEvent event) {
// For browsers that don't support `getCoalescedEvents`, we fall back to
// using the original event.
if (hasJsProperty(event, 'getCoalescedEvents')) {
final List<DomPointerEvent> coalescedEvents =
event.getCoalescedEvents().cast<DomPointerEvent>();
// Some events don't perform coalescing, so they return an empty list. In
// that case, we also fall back to using the original event.
if (coalescedEvents.isNotEmpty) {
return coalescedEvents;
}
}
// Important: coalesced events lack the `eventTarget` property (because they're
// being handled in a deferred way).
//
// See the "Note" here: https://developer.mozilla.org/en-US/docs/Web/API/Event/currentTarget
return <DomPointerEvent>[event];
}
ui.PointerDeviceKind _pointerTypeToDeviceKind(String pointerType) {
switch (pointerType) {
case 'mouse':
return ui.PointerDeviceKind.mouse;
case 'pen':
return ui.PointerDeviceKind.stylus;
case 'touch':
return ui.PointerDeviceKind.touch;
default:
return ui.PointerDeviceKind.unknown;
}
}
int _getPointerId(DomPointerEvent event) {
// We force `device: _mouseDeviceId` on mouse pointers because wheel events
// might come before any pointer events, and since wheel events don't contain
// a pointerId, we always assign `device: _mouseDeviceId` to them.
final ui.PointerDeviceKind kind = _pointerTypeToDeviceKind(event.pointerType!);
return kind == ui.PointerDeviceKind.mouse ? _mouseDeviceId :
event.pointerId!.toInt();
}
/// Tilt angle is -90 to +90. Takes the maximum deflection and converts it to radians.
double _computeHighestTilt(DomPointerEvent e) =>
(e.tiltX!.abs() > e.tiltY!.abs() ? e.tiltX : e.tiltY)! /
180.0 *
math.pi;
}
| engine/lib/web_ui/lib/src/engine/pointer_binding.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/pointer_binding.dart",
"repo_id": "engine",
"token_count": 14116
} | 296 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../dom.dart';
import 'semantics.dart';
/// Represents semantic objects that deliver information in a visual manner.
///
/// Uses aria img role to convey this semantic information to the element.
///
/// Screen readers take advantage of "aria-label" to describe the visual.
class ImageRoleManager extends PrimaryRoleManager {
ImageRoleManager(SemanticsObject semanticsObject)
: super.blank(PrimaryRole.image, semanticsObject) {
// The following secondary roles can coexist with images. `LabelAndValue` is
// not used because this role manager uses special auxiliary elements to
// supply ARIA labels.
// TODO(yjbanov): reevaluate usage of aux elements, https://github.com/flutter/flutter/issues/129317
addFocusManagement();
addLiveRegion();
addRouteName();
addTappable();
}
@override
bool focusAsRouteDefault() => focusable?.focusAsRouteDefault() ?? false;
/// An element with role="img" and an aria-label could block access to all of
/// its child elements. Therefore, if the semantic object has child nodes,
/// create an auxiliary element and place the image description on it instead.
DomElement? _auxiliaryImageElement;
@override
void update() {
super.update();
if (semanticsObject.isVisualOnly && semanticsObject.hasChildren) {
if (_auxiliaryImageElement == null) {
_auxiliaryImageElement = domDocument.createElement('flt-semantics-img');
// Absolute positioning and sizing of leaf text elements confuses
// VoiceOver. So we let the browser size the value node. The node will
// still have a bigger tap area. However, if the node is a parent to
// other nodes, then VoiceOver behaves as expected with absolute
// positioning and sizing.
if (semanticsObject.hasChildren) {
_auxiliaryImageElement!.style
..position = 'absolute'
..top = '0'
..left = '0'
..width = '${semanticsObject.rect!.width}px'
..height = '${semanticsObject.rect!.height}px';
}
_auxiliaryImageElement!.style.fontSize = '6px';
append(_auxiliaryImageElement!);
}
_auxiliaryImageElement!.setAttribute('role', 'img');
_setLabel(_auxiliaryImageElement);
} else if (semanticsObject.isVisualOnly) {
setAriaRole('img');
_setLabel(element);
_cleanUpAuxiliaryElement();
} else {
_cleanUpAuxiliaryElement();
_cleanupElement();
}
}
void _setLabel(DomElement? element) {
if (semanticsObject.hasLabel) {
element!.setAttribute('aria-label', semanticsObject.label!);
}
}
void _cleanUpAuxiliaryElement() {
if (_auxiliaryImageElement != null) {
_auxiliaryImageElement!.remove();
_auxiliaryImageElement = null;
}
}
void _cleanupElement() {
removeAttribute('aria-label');
}
@override
void dispose() {
super.dispose();
_cleanUpAuxiliaryElement();
_cleanupElement();
}
}
| engine/lib/web_ui/lib/src/engine/semantics/image.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/semantics/image.dart",
"repo_id": "engine",
"token_count": 1064
} | 297 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'dart:typed_data';
class ShaderData {
ShaderData({
required this.source,
required this.uniforms,
required this.floatCount,
required this.textureCount,
});
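/// Decodes [ShaderData] from UTF-8 encoded JSON bytes.
///
/// A minimal sketch of the JSON shape accepted by this factory, inferred from
/// the parsing logic below (field values are illustrative only):
///
///     {
///       "sksl": {
///         "shader": "<SkSL source>",
///         "uniforms": [
///           {
///             "name": "u_color",
///             "location": 0,
///             "type": 10,
///             "bit_width": 32,
///             "array_elements": 0,
///             "rows": 1,
///             "columns": 4
///           }
///         ]
///       }
///     }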
factory ShaderData.fromBytes(Uint8List data) {
final String contents = utf8.decode(data);
final Object? rawShaderData = json.decode(contents);
if (rawShaderData is! Map<String, Object?>) {
throw const FormatException('Invalid Shader Data');
}
final Object? root = rawShaderData['sksl'];
if (root is! Map<String, Object?>) {
throw const FormatException('Invalid Shader Data');
}
final Object? source = root['shader'];
final Object? rawUniforms = root['uniforms'];
if (source is! String || rawUniforms is! List<Object?>) {
throw const FormatException('Invalid Shader Data');
}
final List<UniformData> uniforms = List<UniformData>.filled(rawUniforms.length, UniformData.empty);
int textureCount = 0;
int floatCount = 0;
for (int i = 0; i < rawUniforms.length; i += 1) {
final Object? rawUniformData = rawUniforms[i];
if (rawUniformData is! Map<String, Object?>) {
throw const FormatException('Invalid Shader Data');
}
final Object? name = rawUniformData['name'];
final Object? location = rawUniformData['location'];
final Object? rawType = rawUniformData['type'];
if (name is! String || location is! int || rawType is! int) {
throw const FormatException('Invalid Shader Data');
}
final UniformType? type = uniformTypeFromJson(rawType);
if (type == null) {
throw const FormatException('Invalid Shader Data');
}
if (type == UniformType.SampledImage) {
textureCount += 1;
} else {
final Object? bitWidth = rawUniformData['bit_width'];
final Object? arrayElements = rawUniformData['array_elements'];
final Object? rows = rawUniformData['rows'];
final Object? columns = rawUniformData['columns'];
if (bitWidth is! int ||
rows is! int ||
arrayElements is! int ||
columns is! int) {
throw const FormatException('Invalid Shader Data');
}
final int units = rows * columns;
int value = (bitWidth ~/ 32) * units;
if (arrayElements > 1) {
value *= arrayElements;
}
floatCount += value;
}
uniforms[i] = UniformData(
name: name,
location: location,
type: type,
);
}
return ShaderData(
source: source,
uniforms: uniforms,
floatCount: floatCount,
textureCount: textureCount,
);
}
String source;
List<UniformData> uniforms;
int floatCount;
int textureCount;
}
class UniformData {
const UniformData({
required this.name,
required this.location,
required this.type,
});
final String name;
final UniformType type;
final int location;
static const UniformData empty =
UniformData(name: '', location: -1, type: UniformType.Float);
}
enum UniformType {
Boolean,
SByte,
UByte,
Short,
UShort,
Int,
Uint,
Int64,
Uint64,
Half,
Float,
Double,
SampledImage,
}
UniformType? uniformTypeFromJson(int value) {
switch (value) {
case 0:
return UniformType.Boolean;
case 1:
return UniformType.SByte;
case 2:
return UniformType.UByte;
case 3:
return UniformType.Short;
case 4:
return UniformType.UShort;
case 5:
return UniformType.Int;
case 6:
return UniformType.Uint;
case 7:
return UniformType.Int64;
case 8:
return UniformType.Uint64;
case 9:
return UniformType.Half;
case 10:
return UniformType.Float;
case 11:
return UniformType.Double;
case 12:
return UniformType.SampledImage;
}
return null;
}
| engine/lib/web_ui/lib/src/engine/shader_data.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/shader_data.dart",
"repo_id": "engine",
"token_count": 1600
} | 298 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@DefaultAsset('skwasm')
library skwasm_impl;
import 'dart:ffi';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';
final class RawImageFilter extends Opaque {}
typedef ImageFilterHandle = Pointer<RawImageFilter>;
final class RawColorFilter extends Opaque {}
typedef ColorFilterHandle = Pointer<RawColorFilter>;
final class RawMaskFilter extends Opaque {}
typedef MaskFilterHandle = Pointer<RawMaskFilter>;
@Native<ImageFilterHandle Function(
Float,
Float,
Int
)>(symbol: 'imageFilter_createBlur', isLeaf: true)
external ImageFilterHandle imageFilterCreateBlur(
double sigmaX,
double sigmaY,
int tileMode,
);
@Native<ImageFilterHandle Function(
Float,
Float,
)>(symbol: 'imageFilter_createDilate', isLeaf: true)
external ImageFilterHandle imageFilterCreateDilate(
double radiusX,
double radiusY,
);
@Native<ImageFilterHandle Function(
Float,
Float,
)>(symbol: 'imageFilter_createErode', isLeaf: true)
external ImageFilterHandle imageFilterCreateErode(
double radiusX,
double radiusY,
);
@Native<ImageFilterHandle Function(
Pointer<Float>,
Int,
)>(symbol: 'imageFilter_createMatrix', isLeaf: true)
external ImageFilterHandle imageFilterCreateMatrix(
Pointer<Float> matrix33,
int quality,
);
@Native<ImageFilterHandle Function(ColorFilterHandle)>(
symbol: 'imageFilter_createFromColorFilter', isLeaf: true)
external ImageFilterHandle imageFilterCreateFromColorFilter(
ColorFilterHandle colorFilter
);
@Native<ImageFilterHandle Function(
ImageFilterHandle,
ImageFilterHandle,
)>(symbol: 'imageFilter_compose', isLeaf: true)
external ImageFilterHandle imageFilterCompose(
ImageFilterHandle outer,
ImageFilterHandle inner,
);
@Native<Void Function(ImageFilterHandle)>(symbol: 'imageFilter_dispose', isLeaf: true)
external void imageFilterDispose(ImageFilterHandle handle);
@Native<Void Function(ImageFilterHandle, RawIRect)>(
symbol: 'imageFilter_getFilterBounds', isLeaf: true)
external void imageFilterGetFilterBounds(ImageFilterHandle handle, RawIRect inOutRect);
@Native<ColorFilterHandle Function(
Int,
Int
)>(symbol: 'colorFilter_createMode', isLeaf: true)
external ColorFilterHandle colorFilterCreateMode(int color, int mode);
@Native<ColorFilterHandle Function(
Pointer<Float>
)>(symbol: 'colorFilter_createMatrix', isLeaf: true)
external ColorFilterHandle colorFilterCreateMatrix(Pointer<Float> matrix);
@Native<ColorFilterHandle Function()>(symbol: 'colorFilter_createSRGBToLinearGamma', isLeaf: true)
external ColorFilterHandle colorFilterCreateSRGBToLinearGamma();
@Native<ColorFilterHandle Function()>(symbol: 'colorFilter_createLinearToSRGBGamma', isLeaf: true)
external ColorFilterHandle colorFilterCreateLinearToSRGBGamma();
@Native<ColorFilterHandle Function(
ColorFilterHandle,
ColorFilterHandle
)>(symbol: 'colorFilter_compose', isLeaf: true)
external ColorFilterHandle colorFilterCompose(
ColorFilterHandle outer,
ColorFilterHandle inner,
);
@Native<Void Function(ColorFilterHandle)>(symbol: 'colorFilter_dispose', isLeaf: true)
external void colorFilterDispose(ColorFilterHandle handle);
@Native<MaskFilterHandle Function(
Int,
Float,
)>(symbol: 'maskFilter_createBlur', isLeaf: true)
external MaskFilterHandle maskFilterCreateBlur(
int blurStyle,
double sigma,
);
@Native<Void Function(MaskFilterHandle)>(symbol: 'maskFilter_dispose', isLeaf: true)
external void maskFilterDispose(MaskFilterHandle handle);
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_filters.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_filters.dart",
"repo_id": "engine",
"token_count": 1089
} | 299 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@DefaultAsset('skwasm')
library skwasm_impl;
import 'dart:ffi';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';
final class RawParagraph extends Opaque {}
typedef ParagraphHandle = Pointer<RawParagraph>;
final class RawTextBoxList extends Opaque {}
typedef TextBoxListHandle = Pointer<RawTextBoxList>;
final class RawUnicodePositionBuffer extends Opaque {}
typedef UnicodePositionBufferHandle = Pointer<RawUnicodePositionBuffer>;
final class RawLineBreakBuffer extends Opaque {}
typedef LineBreakBufferHandle = Pointer<RawLineBreakBuffer>;
final class LineBreak extends Struct {
@Int32()
external int position;
@Int32()
external int lineBreakType;
}
@Native<Void Function(ParagraphHandle)>(symbol: 'paragraph_dispose', isLeaf: true)
external void paragraphDispose(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getWidth', isLeaf: true)
external double paragraphGetWidth(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getHeight', isLeaf: true)
external double paragraphGetHeight(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getLongestLine', isLeaf: true)
external double paragraphGetLongestLine(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getMinIntrinsicWidth', isLeaf: true)
external double paragraphGetMinIntrinsicWidth(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getMaxIntrinsicWidth', isLeaf: true)
external double paragraphGetMaxIntrinsicWidth(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getAlphabeticBaseline', isLeaf: true)
external double paragraphGetAlphabeticBaseline(ParagraphHandle handle);
@Native<Float Function(ParagraphHandle)>(symbol: 'paragraph_getIdeographicBaseline', isLeaf: true)
external double paragraphGetIdeographicBaseline(ParagraphHandle handle);
@Native<Bool Function(ParagraphHandle)>(symbol: 'paragraph_getDidExceedMaxLines', isLeaf: true)
external bool paragraphGetDidExceedMaxLines(ParagraphHandle handle);
@Native<Void Function(ParagraphHandle, Float)>(symbol: 'paragraph_layout', isLeaf: true)
external void paragraphLayout(ParagraphHandle handle, double width);
@Native<Int32 Function(
ParagraphHandle,
Float,
Float,
Pointer<Int32>
)>(symbol: 'paragraph_getPositionForOffset', isLeaf: true)
external int paragraphGetPositionForOffset(
ParagraphHandle handle,
double offsetX,
double offsetY,
Pointer<Int32> outAffinity,
);
@Native<Bool Function(ParagraphHandle, Float, Float, RawRect, Pointer<Uint32>, Pointer<Bool>)>(symbol: 'paragraph_getClosestGlyphInfoAtCoordinate')
external bool paragraphGetClosestGlyphInfoAtCoordinate(
ParagraphHandle handle,
double offsetX, double offsetY,
RawRect graphemeLayoutBounds, // 4 floats, [LTRB]
Pointer<Uint32> graphemeCodeUnitRange, // 2 `size_t`s, start and end.
Pointer<Bool> booleanFlags, // 1 boolean, isLTR.
);
@Native<Bool Function(ParagraphHandle, Uint32, RawRect, Pointer<Uint32>, Pointer<Bool>)>(symbol: 'paragraph_getGlyphInfoAt')
external bool paragraphGetGlyphInfoAt(
ParagraphHandle handle,
int codeUnitOffset,
RawRect graphemeLayoutBounds, // 4 floats, [LTRB]
Pointer<Uint32> graphemeCodeUnitRange, // 2 `size_t`s, start and end.
Pointer<Bool> booleanFlags, // 1 boolean, isLTR.
);
@Native<Void Function(
ParagraphHandle,
UnsignedInt,
Pointer<Int32>,
)>(symbol: 'paragraph_getWordBoundary', isLeaf: true)
external void paragraphGetWordBoundary(
ParagraphHandle handle,
int position,
Pointer<Int32> outRange, // Two `size_t`s, start and end
);
@Native<Size Function(ParagraphHandle)>(symbol: 'paragraph_getLineCount', isLeaf: true)
external int paragraphGetLineCount(ParagraphHandle handle);
@Native<Int Function(ParagraphHandle, Size)>(symbol: 'paragraph_getLineNumberAt', isLeaf: true)
external int paragraphGetLineNumberAt(ParagraphHandle handle, int characterIndex);
@Native<LineMetricsHandle Function(
ParagraphHandle,
Size,
)>(symbol: 'paragraph_getLineMetricsAtIndex', isLeaf: true)
external LineMetricsHandle paragraphGetLineMetricsAtIndex(
ParagraphHandle handle,
int index,
);
@Native<Void Function(TextBoxListHandle)>(symbol: 'textBoxList_dispose', isLeaf: true)
external void textBoxListDispose(TextBoxListHandle handle);
@Native<Size Function(TextBoxListHandle)>(symbol: 'textBoxList_getLength', isLeaf: true)
external int textBoxListGetLength(TextBoxListHandle handle);
@Native<Int Function(
TextBoxListHandle,
Size,
RawRect
)>(symbol: 'textBoxList_getBoxAtIndex', isLeaf: true)
external int textBoxListGetBoxAtIndex(
TextBoxListHandle handle,
int index,
RawRect outRect,
);
@Native<TextBoxListHandle Function(
ParagraphHandle,
Int,
Int,
Int,
Int,
)>(symbol: 'paragraph_getBoxesForRange', isLeaf: true)
external TextBoxListHandle paragraphGetBoxesForRange(
ParagraphHandle handle,
int start,
int end,
int heightStyle,
int widthStyle,
);
@Native<TextBoxListHandle Function(ParagraphHandle)>(
symbol: 'paragraph_getBoxesForPlaceholders', isLeaf: true)
external TextBoxListHandle paragraphGetBoxesForPlaceholders(ParagraphHandle handle);
// Returns a list of the code points that were unable to be rendered with the
// selected fonts. The list is deduplicated, so each code point in the output
// is unique.
// If `nullptr` is passed in for `outCodePoints`, we simply return the count
// of the code points.
// Note: This must be called after the paragraph has been laid out at least
// once in order to get valid data.
@Native<Int Function(
ParagraphHandle,
Pointer<Uint32>,
Int,
)>(symbol: 'paragraph_getUnresolvedCodePoints', isLeaf: true)
external int paragraphGetUnresolvedCodePoints(
ParagraphHandle handle,
Pointer<Uint32> outCodePoints,
int outLength,
);
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph.dart",
"repo_id": "engine",
"token_count": 1902
} | 300 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Splits [text] into a list of [TextFragment]s.
///
/// Various subclasses can perform the fragmenting based on their own criteria.
///
/// See:
///
/// - [LineBreakFragmenter]: Fragments text based on line break opportunities.
/// - [BidiFragmenter]: Fragments text based on directionality.
abstract class TextFragmenter {
const TextFragmenter(this.text);
/// The text to be fragmented.
final String text;
/// Performs the fragmenting of [text] and returns a list of [TextFragment]s.
List<TextFragment> fragment();
}
/// Represents a fragment produced by [TextFragmenter].
abstract class TextFragment {
const TextFragment(this.start, this.end);
final int start;
final int end;
/// Whether this fragment's range overlaps with the range from [start] to [end].
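///
/// For example (illustrative): a fragment covering the half-open range
/// [3, 7) overlaps with [5, 10) but not with [7, 12).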
bool overlapsWith(int start, int end) {
return start < this.end && this.start < end;
}
}
| engine/lib/web_ui/lib/src/engine/text/fragmenter.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/text/fragmenter.dart",
"repo_id": "engine",
"token_count": 302
} | 301 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../browser_detection.dart';
import '../dom.dart';
/// Various types of inputs used in text fields.
///
/// These types are coming from Flutter's [TextInputType]. Currently, we don't
/// support all the types. We fallback to [EngineInputType.text] when Flutter
/// sends a type that isn't supported.
// TODO(mdebbar): Support more types.
abstract class EngineInputType {
const EngineInputType();
static EngineInputType fromName(String name, {bool isDecimal = false, bool isMultiline = false}) {
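// For example (illustrative): 'TextInputType.number' with isDecimal == true
// maps to [decimal], and an unrecognized name falls back to [text] via the
// default case below.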
switch (name) {
case 'TextInputType.number':
return isDecimal ? decimal : number;
case 'TextInputType.phone':
return phone;
case 'TextInputType.emailAddress':
return emailAddress;
case 'TextInputType.url':
return url;
case 'TextInputType.multiline':
return multiline;
case 'TextInputType.none':
return isMultiline ? multilineNone : none;
case 'TextInputType.text':
default:
return text;
}
}
/// No text input.
static const NoTextInputType none = NoTextInputType();
/// Multi-line no text input.
static const MultilineNoTextInputType multilineNone = MultilineNoTextInputType();
/// Single-line text input type.
static const TextInputType text = TextInputType();
/// Numeric input type.
static const NumberInputType number = NumberInputType();
/// Decimal input type.
static const DecimalInputType decimal = DecimalInputType();
/// Phone number input type.
static const PhoneInputType phone = PhoneInputType();
/// Email address input type.
static const EmailInputType emailAddress = EmailInputType();
/// URL input type.
static const UrlInputType url = UrlInputType();
/// Multi-line text input type.
static const MultilineInputType multiline = MultilineInputType();
/// The HTML `inputmode` attribute to be set on the DOM element.
///
/// This HTML attribute helps the browser decide what kind of keyboard works
/// best for this text field.
///
/// For various `inputmode` values supported by browsers, see:
/// <https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/inputmode>.
String? get inputmodeAttribute;
/// Create the appropriate DOM element for this input type.
DomHTMLElement createDomElement() => createDomHTMLInputElement();
/// Given a [domElement], set attributes that are specific to this input type.
void configureInputMode(DomHTMLElement domElement) {
if (inputmodeAttribute == null) {
return;
}
// Only apply `inputmode` in mobile browsers so that the right virtual
// keyboard shows up.
if (operatingSystem == OperatingSystem.iOs ||
operatingSystem == OperatingSystem.android ||
inputmodeAttribute == EngineInputType.none.inputmodeAttribute) {
domElement.setAttribute('inputmode', inputmodeAttribute!);
}
}
}
/// No text input.
class NoTextInputType extends EngineInputType {
const NoTextInputType();
@override
String get inputmodeAttribute => 'none';
}
/// Multi-line no text input from the system virtual keyboard.
///
/// See: https://github.com/flutter/flutter/issues/125875
///
/// Use this for inputting multiple lines with a customized keyboard.
///
/// When Flutter uses a custom virtual keyboard, it sends [TextInputType.none]
/// with a [isMultiline] flag to block the system virtual keyboard.
///
/// For [MultilineNoTextInputType] (mapped to [TextInputType.none] with
/// [isMultiline] = true), it creates a <textarea> element with the
/// inputmode="none" attribute.
///
/// For [NoTextInputType] (mapped to [TextInputType.none] with
/// [isMultiline] = false), it creates an <input> element with the
/// inputmode="none" attribute.
class MultilineNoTextInputType extends MultilineInputType {
const MultilineNoTextInputType();
@override
String? get inputmodeAttribute => 'none';
@override
DomHTMLElement createDomElement() => createDomHTMLTextAreaElement();
}
/// Single-line text input type.
class TextInputType extends EngineInputType {
const TextInputType();
@override
String? get inputmodeAttribute => null;
}
/// Numeric input type.
///
/// Input keyboard with only the digits 0–9.
class NumberInputType extends EngineInputType {
const NumberInputType();
@override
String get inputmodeAttribute => 'numeric';
}
/// Decimal input type.
///
/// Input keyboard containing the digits 0–9 and a decimal separator.
/// The separator can be `.` or `,` depending on the locale.
class DecimalInputType extends EngineInputType {
const DecimalInputType();
@override
String get inputmodeAttribute => 'decimal';
}
/// Phone number input type.
class PhoneInputType extends EngineInputType {
const PhoneInputType();
@override
String get inputmodeAttribute => 'tel';
}
/// Email address input type.
class EmailInputType extends EngineInputType {
const EmailInputType();
@override
String get inputmodeAttribute => 'email';
}
/// URL input type.
class UrlInputType extends EngineInputType {
const UrlInputType();
@override
String get inputmodeAttribute => 'url';
}
/// Multi-line text input type.
class MultilineInputType extends EngineInputType {
const MultilineInputType();
@override
String? get inputmodeAttribute => null;
@override
DomHTMLElement createDomElement() => createDomHTMLTextAreaElement();
}
| engine/lib/web_ui/lib/src/engine/text_editing/input_type.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/text_editing/input_type.dart",
"repo_id": "engine",
"token_count": 1636
} | 302 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_interop';
import 'package:meta/meta.dart';
import 'package:ui/src/engine.dart';
import '../dom.dart';
/// This is state persistent across hot restarts that indicates what
/// to clear. Delay removal of old visible state to make the
/// transition appear smooth.
@JS('window.__flutterState')
external JSArray<JSAny?>? get _jsHotRestartStore;
@JS('window.__flutterState')
external set _jsHotRestartStore(JSArray<JSAny?>? nodes);
/// Handles [DomElement]s that need to be removed after a hot-restart.
///
/// This class shouldn't be used directly. It's only made public for testing
/// purposes. Instead, use [registerElementForCleanup].
///
/// Elements are stored in a [JSArray] kept globally at `window.__flutterState`.
///
/// When the app hot-restarts (and a new instance of this class is created),
/// all elements in the global [JSArray] are removed from the DOM.
class HotRestartCacheHandler {
@visibleForTesting
HotRestartCacheHandler() {
_resetHotRestartStore();
}
/// Removes every element that was registered prior to the hot-restart from
/// the DOM.
void _resetHotRestartStore() {
final JSArray<JSAny?>? jsStore = _jsHotRestartStore;
if (jsStore != null) {
// We are in a post-hot-restart world; clear the elements now.
final List<Object?> store = jsStore.toObjectShallow as List<Object?>;
for (final Object? element in store) {
if (element != null) {
(element as DomElement).remove();
}
}
}
_jsHotRestartStore = JSArray<JSAny?>();
}
/// Registers a [DomElement] to be removed after hot-restart.
@visibleForTesting
void registerElement(DomElement element) {
_jsHotRestartStore!.push(element as JSObject);
}
}
final HotRestartCacheHandler? _hotRestartCache = () {
// In release mode, we don't need a hot restart cache, so we leave it null.
HotRestartCacheHandler? cache;
assert(() {
cache = HotRestartCacheHandler();
return true;
}());
return cache;
}();
/// Registers a [DomElement] to be cleaned up after hot restart.
void registerElementForCleanup(DomElement element) {
_hotRestartCache?.registerElement(element);
}
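// Example (illustrative usage, not part of this file's API surface): create a
// DOM element, attach it, and register it so the next hot restart removes it.
//
//   final DomElement banner = createDomHTMLDivElement();
//   domDocument.body!.append(banner);
//   registerElementForCleanup(banner);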
| engine/lib/web_ui/lib/src/engine/view_embedder/hot_restart_cache_handler.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/view_embedder/hot_restart_cache_handler.dart",
"repo_id": "engine",
"token_count": 746
} | 303 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
extension SingletonFlutterWindowExtension on ui.SingletonFlutterWindow {
/// Overrides the value of [physicalSize] in tests.
set debugPhysicalSizeOverride(ui.Size? value) {
(this as EngineFlutterWindow).debugPhysicalSizeOverride = value;
}
}
/// Overrides the value of [ui.FlutterView.devicePixelRatio] in tests.
///
/// Passing `null` resets the device pixel ratio to the browser's default.
void debugOverrideDevicePixelRatio(double? value) {
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(value);
}
/// Whether the Flutter engine is running in `flutter test` emulation mode.
///
/// When true, the engine will emulate a specific screen size, and always
/// use the "Ahem" font to reduce test flakiness and dependence on the test
/// environment.
bool get debugEmulateFlutterTesterEnvironment =>
_debugEmulateFlutterTesterEnvironment;
/// Sets whether the Flutter engine is running in `flutter test` emulation mode.
set debugEmulateFlutterTesterEnvironment(bool value) {
_debugEmulateFlutterTesterEnvironment = value;
if (_debugEmulateFlutterTesterEnvironment) {
const ui.Size logicalSize = ui.Size(800.0, 600.0);
final EngineFlutterWindow? implicitView = EnginePlatformDispatcher.instance.implicitView;
implicitView?.debugPhysicalSizeOverride = logicalSize * implicitView.devicePixelRatio;
}
debugDisableFontFallbacks = value;
}
bool _debugEmulateFlutterTesterEnvironment = false;
| engine/lib/web_ui/lib/ui_web/src/ui_web/testing.dart/0 | {
"file_path": "engine/lib/web_ui/lib/ui_web/src/ui_web/testing.dart",
"repo_id": "engine",
"token_count": 480
} | 304 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "export.h"
#include "helpers.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/effects/SkGradientShader.h"
#include "third_party/skia/include/effects/SkRuntimeEffect.h"
#include "wrappers.h"
using namespace Skwasm;
SKWASM_EXPORT SkShader* shader_createLinearGradient(
SkPoint* endPoints, // Two points
SkColor* colors,
SkScalar* stops,
int count, // Number of stops/colors
SkTileMode tileMode,
SkScalar* matrix33 // Can be nullptr
) {
if (matrix33) {
SkMatrix localMatrix = createMatrix(matrix33);
return SkGradientShader::MakeLinear(endPoints, colors, stops, count,
tileMode, 0, &localMatrix)
.release();
} else {
return SkGradientShader::MakeLinear(endPoints, colors, stops, count,
tileMode)
.release();
}
}
SKWASM_EXPORT SkShader* shader_createRadialGradient(SkScalar centerX,
SkScalar centerY,
SkScalar radius,
SkColor* colors,
SkScalar* stops,
int count,
SkTileMode tileMode,
SkScalar* matrix33) {
if (matrix33) {
SkMatrix localMatrix = createMatrix(matrix33);
return SkGradientShader::MakeRadial({centerX, centerY}, radius, colors,
stops, count, tileMode, 0, &localMatrix)
.release();
} else {
return SkGradientShader::MakeRadial({centerX, centerY}, radius, colors,
stops, count, tileMode)
.release();
}
}
SKWASM_EXPORT SkShader* shader_createConicalGradient(
SkPoint* endPoints, // Two points
SkScalar startRadius,
SkScalar endRadius,
SkColor* colors,
SkScalar* stops,
int count,
SkTileMode tileMode,
SkScalar* matrix33) {
if (matrix33) {
SkMatrix localMatrix = createMatrix(matrix33);
return SkGradientShader::MakeTwoPointConical(
endPoints[0], startRadius, endPoints[1], endRadius, colors,
stops, count, tileMode, 0, &localMatrix)
.release();
} else {
return SkGradientShader::MakeTwoPointConical(endPoints[0], startRadius,
endPoints[1], endRadius,
colors, stops, count, tileMode)
.release();
}
}
SKWASM_EXPORT SkShader* shader_createSweepGradient(SkScalar centerX,
SkScalar centerY,
SkColor* colors,
SkScalar* stops,
int count,
SkTileMode tileMode,
SkScalar startAngle,
SkScalar endAngle,
SkScalar* matrix33) {
if (matrix33) {
SkMatrix localMatrix = createMatrix(matrix33);
return SkGradientShader::MakeSweep(centerX, centerY, colors, stops, count,
tileMode, startAngle, endAngle, 0,
&localMatrix)
.release();
} else {
return SkGradientShader::MakeSweep(centerX, centerY, colors, stops, count,
tileMode, startAngle, endAngle, 0,
nullptr)
.release();
}
}
SKWASM_EXPORT void shader_dispose(SkShader* shader) {
shader->unref();
}
SKWASM_EXPORT SkRuntimeEffect* runtimeEffect_create(SkString* source) {
auto result = SkRuntimeEffect::MakeForShader(*source);
if (result.effect == nullptr) {
printf("Failed to compile shader. Error text:\n%s",
result.errorText.data());
return nullptr;
} else {
return result.effect.release();
}
}
SKWASM_EXPORT void runtimeEffect_dispose(SkRuntimeEffect* effect) {
effect->unref();
}
SKWASM_EXPORT size_t runtimeEffect_getUniformSize(SkRuntimeEffect* effect) {
return effect->uniformSize();
}
SKWASM_EXPORT SkShader* shader_createRuntimeEffectShader(
SkRuntimeEffect* runtimeEffect,
SkData* uniforms,
SkShader** children,
size_t childCount) {
std::vector<sk_sp<SkShader>> childPointers;
for (size_t i = 0; i < childCount; i++) {
childPointers.emplace_back(sk_ref_sp<SkShader>(children[i]));
}
return runtimeEffect
->makeShader(SkData::MakeWithCopy(uniforms->data(), uniforms->size()),
childPointers.data(), childCount, nullptr)
.release();
}
SKWASM_EXPORT SkShader* shader_createFromImage(SkImage* image,
SkTileMode tileModeX,
SkTileMode tileModeY,
FilterQuality quality,
SkScalar* matrix33) {
if (matrix33) {
SkMatrix localMatrix = createMatrix(matrix33);
return image
->makeShader(tileModeX, tileModeY, samplingOptionsForQuality(quality),
&localMatrix)
.release();
} else {
return image
->makeShader(tileModeX, tileModeY, samplingOptionsForQuality(quality))
.release();
}
}
| engine/lib/web_ui/skwasm/shaders.cpp/0 | {
"file_path": "engine/lib/web_ui/skwasm/shaders.cpp",
"repo_id": "engine",
"token_count": 3070
} | 305 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_interop';
import 'dart:math';
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'package:web_engine_tester/golden_tester.dart';
import '../common/matchers.dart';
import 'common.dart';
import 'test_data.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
group('CanvasKit API', () {
setUpCanvasKitTest();
_blendModeTests();
_paintStyleTests();
_strokeCapTests();
_strokeJoinTests();
_blurStyleTests();
_tileModeTests();
_fillTypeTests();
_pathOpTests();
_clipOpTests();
_pointModeTests();
_vertexModeTests();
_imageTests();
_shaderTests();
_paintTests();
_maskFilterTests();
_colorFilterTests();
_imageFilterTests();
_mallocTests();
_sharedColorTests();
_toSkPointTests();
_toSkColorStopsTests();
_toSkMatrixFromFloat32Tests();
_toSkM44FromFloat32Tests();
_matrix4x4CompositionTests();
_toSkRectTests();
_skVerticesTests();
_pictureTests();
group('SkParagraph', () {
_paragraphTests();
});
group('SkPath', () {
_pathTests();
});
group('SkCanvas', () {
_canvasTests();
});
group('SkParagraph', () {
_textStyleTests();
});
});
}
void _blendModeTests() {
test('blend mode mapping is correct', () {
expect(canvasKit.BlendMode.Clear.value, ui.BlendMode.clear.index);
expect(canvasKit.BlendMode.Src.value, ui.BlendMode.src.index);
expect(canvasKit.BlendMode.Dst.value, ui.BlendMode.dst.index);
expect(canvasKit.BlendMode.SrcOver.value, ui.BlendMode.srcOver.index);
expect(canvasKit.BlendMode.DstOver.value, ui.BlendMode.dstOver.index);
expect(canvasKit.BlendMode.SrcIn.value, ui.BlendMode.srcIn.index);
expect(canvasKit.BlendMode.DstIn.value, ui.BlendMode.dstIn.index);
expect(canvasKit.BlendMode.SrcOut.value, ui.BlendMode.srcOut.index);
expect(canvasKit.BlendMode.DstOut.value, ui.BlendMode.dstOut.index);
expect(canvasKit.BlendMode.SrcATop.value, ui.BlendMode.srcATop.index);
expect(canvasKit.BlendMode.DstATop.value, ui.BlendMode.dstATop.index);
expect(canvasKit.BlendMode.Xor.value, ui.BlendMode.xor.index);
expect(canvasKit.BlendMode.Plus.value, ui.BlendMode.plus.index);
expect(canvasKit.BlendMode.Modulate.value, ui.BlendMode.modulate.index);
expect(canvasKit.BlendMode.Screen.value, ui.BlendMode.screen.index);
expect(canvasKit.BlendMode.Overlay.value, ui.BlendMode.overlay.index);
expect(canvasKit.BlendMode.Darken.value, ui.BlendMode.darken.index);
expect(canvasKit.BlendMode.Lighten.value, ui.BlendMode.lighten.index);
expect(canvasKit.BlendMode.ColorDodge.value, ui.BlendMode.colorDodge.index);
expect(canvasKit.BlendMode.ColorBurn.value, ui.BlendMode.colorBurn.index);
expect(canvasKit.BlendMode.HardLight.value, ui.BlendMode.hardLight.index);
expect(canvasKit.BlendMode.SoftLight.value, ui.BlendMode.softLight.index);
expect(canvasKit.BlendMode.Difference.value, ui.BlendMode.difference.index);
expect(canvasKit.BlendMode.Exclusion.value, ui.BlendMode.exclusion.index);
expect(canvasKit.BlendMode.Multiply.value, ui.BlendMode.multiply.index);
expect(canvasKit.BlendMode.Hue.value, ui.BlendMode.hue.index);
expect(canvasKit.BlendMode.Saturation.value, ui.BlendMode.saturation.index);
expect(canvasKit.BlendMode.Color.value, ui.BlendMode.color.index);
expect(canvasKit.BlendMode.Luminosity.value, ui.BlendMode.luminosity.index);
});
test('ui.BlendMode converts to SkBlendMode', () {
for (final ui.BlendMode blendMode in ui.BlendMode.values) {
expect(toSkBlendMode(blendMode).value, blendMode.index);
}
});
}
void _paintStyleTests() {
test('paint style mapping is correct', () {
expect(canvasKit.PaintStyle.Fill.value, ui.PaintingStyle.fill.index);
expect(canvasKit.PaintStyle.Stroke.value, ui.PaintingStyle.stroke.index);
});
test('ui.PaintingStyle converts to SkPaintStyle', () {
for (final ui.PaintingStyle style in ui.PaintingStyle.values) {
expect(toSkPaintStyle(style).value, style.index);
}
});
}
void _strokeCapTests() {
test('stroke cap mapping is correct', () {
expect(canvasKit.StrokeCap.Butt.value, ui.StrokeCap.butt.index);
expect(canvasKit.StrokeCap.Round.value, ui.StrokeCap.round.index);
expect(canvasKit.StrokeCap.Square.value, ui.StrokeCap.square.index);
});
test('ui.StrokeCap converts to SkStrokeCap', () {
for (final ui.StrokeCap cap in ui.StrokeCap.values) {
expect(toSkStrokeCap(cap).value, cap.index);
}
});
}
void _strokeJoinTests() {
test('stroke join mapping is correct', () {
expect(canvasKit.StrokeJoin.Miter.value, ui.StrokeJoin.miter.index);
expect(canvasKit.StrokeJoin.Round.value, ui.StrokeJoin.round.index);
expect(canvasKit.StrokeJoin.Bevel.value, ui.StrokeJoin.bevel.index);
});
test('ui.StrokeJoin converts to SkStrokeJoin', () {
for (final ui.StrokeJoin join in ui.StrokeJoin.values) {
expect(toSkStrokeJoin(join).value, join.index);
}
});
}
void _blurStyleTests() {
test('blur style mapping is correct', () {
expect(canvasKit.BlurStyle.Normal.value, ui.BlurStyle.normal.index);
expect(canvasKit.BlurStyle.Solid.value, ui.BlurStyle.solid.index);
expect(canvasKit.BlurStyle.Outer.value, ui.BlurStyle.outer.index);
expect(canvasKit.BlurStyle.Inner.value, ui.BlurStyle.inner.index);
});
test('ui.BlurStyle converts to SkBlurStyle', () {
for (final ui.BlurStyle style in ui.BlurStyle.values) {
expect(toSkBlurStyle(style).value, style.index);
}
});
}
void _tileModeTests() {
test('tile mode mapping is correct', () {
expect(canvasKit.TileMode.Clamp.value, ui.TileMode.clamp.index);
expect(canvasKit.TileMode.Repeat.value, ui.TileMode.repeated.index);
expect(canvasKit.TileMode.Mirror.value, ui.TileMode.mirror.index);
});
test('ui.TileMode converts to SkTileMode', () {
for (final ui.TileMode mode in ui.TileMode.values) {
expect(toSkTileMode(mode).value, mode.index);
}
});
}
void _fillTypeTests() {
test('fill type mapping is correct', () {
expect(canvasKit.FillType.Winding.value, ui.PathFillType.nonZero.index);
expect(canvasKit.FillType.EvenOdd.value, ui.PathFillType.evenOdd.index);
});
test('ui.PathFillType converts to SkFillType', () {
for (final ui.PathFillType type in ui.PathFillType.values) {
expect(toSkFillType(type).value, type.index);
}
});
}
void _pathOpTests() {
test('path op mapping is correct', () {
expect(
canvasKit.PathOp.Difference.value, ui.PathOperation.difference.index);
expect(canvasKit.PathOp.Intersect.value, ui.PathOperation.intersect.index);
expect(canvasKit.PathOp.Union.value, ui.PathOperation.union.index);
expect(canvasKit.PathOp.XOR.value, ui.PathOperation.xor.index);
expect(canvasKit.PathOp.ReverseDifference.value,
ui.PathOperation.reverseDifference.index);
});
test('ui.PathOperation converts to SkPathOp', () {
for (final ui.PathOperation op in ui.PathOperation.values) {
expect(toSkPathOp(op).value, op.index);
}
});
test('Path.combine test', () {
final ui.Path path1 = ui.Path();
expect(path1, isA<CkPath>());
path1.addRect(const ui.Rect.fromLTRB(0, 0, 10, 10));
path1.addOval(const ui.Rect.fromLTRB(10, 10, 100, 100));
final ui.Path path2 = ui.Path();
expect(path2, isA<CkPath>());
path2.addRect(const ui.Rect.fromLTRB(5, 5, 15, 15));
path2.addOval(const ui.Rect.fromLTRB(15, 15, 105, 105));
final ui.Path union = ui.Path.combine(ui.PathOperation.union, path1, path2);
expect(union, isA<CkPath>());
expect(union.getBounds(), const ui.Rect.fromLTRB(0, 0, 105, 105));
// Smoke-test other operations.
for (final ui.PathOperation operation in ui.PathOperation.values) {
final ui.Path combined = ui.Path.combine(operation, path1, path2);
expect(combined, isA<CkPath>());
}
});
}
void _clipOpTests() {
test('clip op mapping is correct', () {
expect(canvasKit.ClipOp.Difference.value, ui.ClipOp.difference.index);
expect(canvasKit.ClipOp.Intersect.value, ui.ClipOp.intersect.index);
});
test('ui.ClipOp converts to SkClipOp', () {
for (final ui.ClipOp op in ui.ClipOp.values) {
expect(toSkClipOp(op).value, op.index);
}
});
}
void _pointModeTests() {
test('point mode mapping is correct', () {
expect(canvasKit.PointMode.Points.value, ui.PointMode.points.index);
expect(canvasKit.PointMode.Lines.value, ui.PointMode.lines.index);
expect(canvasKit.PointMode.Polygon.value, ui.PointMode.polygon.index);
});
test('ui.PointMode converts to SkPointMode', () {
for (final ui.PointMode op in ui.PointMode.values) {
expect(toSkPointMode(op).value, op.index);
}
});
}
void _vertexModeTests() {
test('vertex mode mapping is correct', () {
expect(canvasKit.VertexMode.Triangles.value, ui.VertexMode.triangles.index);
expect(canvasKit.VertexMode.TrianglesStrip.value,
ui.VertexMode.triangleStrip.index);
expect(canvasKit.VertexMode.TriangleFan.value,
ui.VertexMode.triangleFan.index);
});
test('ui.VertexMode converts to SkVertexMode', () {
for (final ui.VertexMode op in ui.VertexMode.values) {
expect(toSkVertexMode(op).value, op.index);
}
});
}
void _imageTests() {
test('MakeAnimatedImageFromEncoded makes a non-animated image', () {
final SkAnimatedImage nonAnimated =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
expect(nonAnimated.getFrameCount(), 1);
expect(nonAnimated.getRepetitionCount(), 0);
expect(nonAnimated.width(), 1);
expect(nonAnimated.height(), 1);
final SkImage frame = nonAnimated.makeImageAtCurrentFrame();
expect(frame.width(), 1);
expect(frame.height(), 1);
expect(nonAnimated.decodeNextFrame(), -1);
expect(
frame.makeShaderOptions(
canvasKit.TileMode.Repeat,
canvasKit.TileMode.Mirror,
canvasKit.FilterMode.Linear,
canvasKit.MipmapMode.Nearest,
toSkMatrixFromFloat32(Matrix4.identity().storage),
),
isNotNull,
);
});
test('MakeAnimatedImageFromEncoded makes an animated image', () {
final SkAnimatedImage animated =
canvasKit.MakeAnimatedImageFromEncoded(kAnimatedGif)!;
expect(animated.getFrameCount(), 3);
expect(animated.getRepetitionCount(), -1); // animates forever
expect(animated.width(), 1);
expect(animated.height(), 1);
for (int i = 0; i < 100; i++) {
final SkImage frame = animated.makeImageAtCurrentFrame();
expect(frame.width(), 1);
expect(frame.height(), 1);
expect(animated.decodeNextFrame(), 100);
}
});
}
void _shaderTests() {
test('MakeLinearGradient', () {
expect(_makeTestShader(), isNotNull);
});
test('MakeRadialGradient', () {
expect(
canvasKit.Shader.MakeRadialGradient(
Float32List.fromList(<double>[1, 1]),
10.0,
Uint32List.fromList(<int>[0xff000000, 0xffffffff]),
Float32List.fromList(<double>[0, 1]),
canvasKit.TileMode.Repeat,
toSkMatrixFromFloat32(Matrix4.identity().storage),
0,
),
isNotNull);
});
test('MakeTwoPointConicalGradient', () {
expect(
canvasKit.Shader.MakeTwoPointConicalGradient(
Float32List.fromList(<double>[1, 1]),
10.0,
Float32List.fromList(<double>[1, 1]),
10.0,
Uint32List.fromList(<int>[0xff000000, 0xffffffff]),
Float32List.fromList(<double>[0, 1]),
canvasKit.TileMode.Repeat,
toSkMatrixFromFloat32(Matrix4.identity().storage),
0,
),
isNotNull);
});
test('RuntimeEffect', () {
const String kSkSlProgram = r'''
half4 main(vec2 fragCoord) {
return vec4(1.0, 0.0, 0.0, 1.0);
}
''';
final SkRuntimeEffect? effect = MakeRuntimeEffect(kSkSlProgram);
expect(effect, isNotNull);
const String kInvalidSkSlProgram = '';
// Invalid SkSL returns null.
final SkRuntimeEffect? invalidEffect = MakeRuntimeEffect(kInvalidSkSlProgram);
expect(invalidEffect, isNull);
final SkShader? shader = effect!.makeShader(<double>[]);
expect(shader, isNotNull);
    // Mismatched uniforms return null.
final SkShader? invalidShader = effect.makeShader(<double>[1]);
expect(invalidShader, isNull);
const String kSkSlProgramWithUniforms = r'''
uniform vec4 u_color;
half4 main(vec2 fragCoord) {
return u_color;
}
''';
final SkShader? shaderWithUniform = MakeRuntimeEffect(kSkSlProgramWithUniforms)
!.makeShader(<double>[1.0, 0.0, 0.0, 1.0]);
expect(shaderWithUniform, isNotNull);
});
}
SkShader _makeTestShader() {
return canvasKit.Shader.MakeLinearGradient(
Float32List.fromList(<double>[0, 0]),
Float32List.fromList(<double>[1, 1]),
Uint32List.fromList(<int>[0xff0000ff]),
Float32List.fromList(<double>[0, 1]),
canvasKit.TileMode.Repeat,
null,
);
}
void _paintTests() {
test('can make SkPaint', () async {
final SkPaint paint = SkPaint();
paint.setBlendMode(canvasKit.BlendMode.SrcOut);
paint.setStyle(canvasKit.PaintStyle.Stroke);
paint.setStrokeWidth(3.0);
paint.setStrokeCap(canvasKit.StrokeCap.Round);
paint.setStrokeJoin(canvasKit.StrokeJoin.Bevel);
paint.setAntiAlias(true);
paint.setColorInt(0x00FFCCAA);
paint.setShader(_makeTestShader());
paint.setMaskFilter(canvasKit.MaskFilter.MakeBlur(
canvasKit.BlurStyle.Outer,
2.0,
true,
));
paint.setColorFilter(canvasKit.ColorFilter.MakeLinearToSRGBGamma());
paint.setStrokeMiter(1.4);
paint.setImageFilter(canvasKit.ImageFilter.MakeBlur(
1,
2,
canvasKit.TileMode.Repeat,
null,
));
});
}
void _maskFilterTests() {
test('MaskFilter.MakeBlur', () {
expect(
canvasKit.MaskFilter.MakeBlur(
canvasKit.BlurStyle.Outer,
5.0,
false,
),
isNotNull);
});
test('MaskFilter.MakeBlur with 0 sigma returns null', () {
expect(
canvasKit.MaskFilter.MakeBlur(canvasKit.BlurStyle.Normal, 0.0, false),
isNull);
});
test('MaskFilter.MakeBlur with NaN sigma returns null', () {
expect(
canvasKit.MaskFilter.MakeBlur(
canvasKit.BlurStyle.Normal, double.nan, false),
isNull);
});
}
void _colorFilterTests() {
test('MakeBlend', () {
expect(
canvasKit.ColorFilter.MakeBlend(
Float32List.fromList(<double>[0, 0, 0, 1]),
canvasKit.BlendMode.SrcATop,
),
isNotNull,
);
});
test('MakeMatrix', () {
expect(
canvasKit.ColorFilter.MakeMatrix(
Float32List(20),
),
isNotNull,
);
});
test('MakeSRGBToLinearGamma', () {
expect(
canvasKit.ColorFilter.MakeSRGBToLinearGamma(),
isNotNull,
);
});
test('MakeLinearToSRGBGamma', () {
expect(
canvasKit.ColorFilter.MakeLinearToSRGBGamma(),
isNotNull,
);
});
}
void _imageFilterTests() {
test('MakeBlur', () {
expect(
canvasKit.ImageFilter.MakeBlur(1, 2, canvasKit.TileMode.Repeat, null),
isNotNull,
);
});
test('toSkFilterOptions', () {
for (final ui.FilterQuality filterQuality in ui.FilterQuality.values) {
expect(toSkFilterOptions(filterQuality), isNotNull);
}
});
test('MakeMatrixTransform', () {
expect(
canvasKit.ImageFilter.MakeMatrixTransform(
toSkMatrixFromFloat32(Matrix4.identity().storage),
toSkFilterOptions(ui.FilterQuality.medium),
null,
),
isNotNull,
);
});
test('MakeColorFilter', () {
expect(
canvasKit.ImageFilter.MakeColorFilter(
canvasKit.ColorFilter.MakeLinearToSRGBGamma(),
null,
),
isNotNull,
);
});
test('MakeCompose', () {
expect(
canvasKit.ImageFilter.MakeCompose(
canvasKit.ImageFilter.MakeBlur(1, 2, canvasKit.TileMode.Repeat, null),
canvasKit.ImageFilter.MakeBlur(1, 2, canvasKit.TileMode.Repeat, null),
),
isNotNull,
);
});
}
void _mallocTests() {
test('$SkFloat32List', () {
final List<SkFloat32List> lists = <SkFloat32List>[];
for (int size = 0; size < 1000; size++) {
final SkFloat32List skList = mallocFloat32List(4);
expect(skList, isNotNull);
expect(skList.toTypedArray(), hasLength(4));
lists.add(skList);
}
for (final SkFloat32List skList in lists) {
// toTypedArray() still works.
expect(() => skList.toTypedArray(), returnsNormally);
free(skList);
// toTypedArray() throws after free.
expect(() => skList.toTypedArray(), throwsA(isA<Error>()));
}
});
test('$SkUint32List', () {
final List<SkUint32List> lists = <SkUint32List>[];
for (int size = 0; size < 1000; size++) {
final SkUint32List skList = mallocUint32List(4);
expect(skList, isNotNull);
expect(skList.toTypedArray(), hasLength(4));
lists.add(skList);
}
for (final SkUint32List skList in lists) {
// toTypedArray() still works.
expect(() => skList.toTypedArray(), returnsNormally);
free(skList);
// toTypedArray() throws after free.
expect(() => skList.toTypedArray(), throwsA(isA<Error>()));
}
});
}
void _sharedColorTests() {
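  // ui.Color packs ARGB into a single int; the shared color buffer holds RGBA
  // channels normalized to the 0..1 range.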
test('toSharedSkColor1', () {
expect(
toSharedSkColor1(const ui.Color(0xAABBCCDD)),
Float32List(4)
..[0] = 0xBB / 255.0
..[1] = 0xCC / 255.0
..[2] = 0xDD / 255.0
..[3] = 0xAA / 255.0,
);
});
test('toSharedSkColor2', () {
expect(
toSharedSkColor2(const ui.Color(0xAABBCCDD)),
Float32List(4)
..[0] = 0xBB / 255.0
..[1] = 0xCC / 255.0
..[2] = 0xDD / 255.0
..[3] = 0xAA / 255.0,
);
});
test('toSharedSkColor3', () {
expect(
toSharedSkColor3(const ui.Color(0xAABBCCDD)),
Float32List(4)
..[0] = 0xBB / 255.0
..[1] = 0xCC / 255.0
..[2] = 0xDD / 255.0
..[3] = 0xAA / 255.0,
);
});
}
void _toSkPointTests() {
test('toSkPoint', () {
expect(
toSkPoint(const ui.Offset(4, 5)),
Float32List(2)
..[0] = 4.0
..[1] = 5.0,
);
});
}
void _toSkColorStopsTests() {
test('toSkColorStops default', () {
expect(
toSkColorStops(null),
Float32List(2)
..[0] = 0
..[1] = 1,
);
});
test('toSkColorStops custom', () {
expect(
toSkColorStops(<double>[1, 2, 3, 4]),
Float32List(4)
..[0] = 1
..[1] = 2
..[2] = 3
..[3] = 4,
);
});
}
void _toSkMatrixFromFloat32Tests() {
test('toSkMatrixFromFloat32', () {
final Matrix4 matrix = Matrix4.identity()
..translate(1, 2, 3)
..rotateZ(4);
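    // toSkMatrixFromFloat32 extracts the row-major 3x3 SkMatrix
    // [scaleX, skewX, transX, skewY, scaleY, transY, persp0, persp1, persp2]
    // from the column-major 4x4 storage.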
expect(
toSkMatrixFromFloat32(matrix.storage),
Float32List.fromList(<double>[
-0.6536436080932617,
0.756802499294281,
1,
-0.756802499294281,
-0.6536436080932617,
2,
-0.0,
0,
1,
]));
});
}
void _toSkM44FromFloat32Tests() {
test('toSkM44FromFloat32', () {
final Matrix4 matrix = Matrix4.identity()
..translate(1, 2, 3)
..rotateZ(4);
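    // SkM44 expects row-major order, so the result is the transpose of the
    // column-major Matrix4 storage.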
expect(
toSkM44FromFloat32(matrix.storage),
Float32List.fromList(<double>[
-0.6536436080932617,
0.756802499294281,
0,
1,
-0.756802499294281,
-0.6536436080932617,
0,
2,
0,
0,
1,
3,
0,
0,
0,
1,
]));
});
}
typedef CanvasCallback = void Function(ui.Canvas canvas);
Future<ui.Image> toImage(CanvasCallback callback, int width, int height) {
final ui.PictureRecorder recorder = ui.PictureRecorder();
final ui.Canvas canvas = ui.Canvas(
recorder, ui.Rect.fromLTRB(0, 0, width.toDouble(), height.toDouble()));
callback(canvas);
final ui.Picture picture = recorder.endRecording();
return picture.toImage(width, height);
}
/// @returns true when the images are reasonably similar.
/// @todo Make the search actually fuzzy to a certain degree.
Future<bool> fuzzyCompareImages(ui.Image golden, ui.Image img) async {
if (golden.width != img.width || golden.height != img.height) {
return false;
}
int getPixel(ByteData data, int x, int y) =>
data.getUint32((x + y * golden.width) * 4);
final ByteData goldenData = (await golden.toByteData())!;
final ByteData imgData = (await img.toByteData())!;
for (int y = 0; y < golden.height; y++) {
for (int x = 0; x < golden.width; x++) {
if (getPixel(goldenData, x, y) != getPixel(imgData, x, y)) {
return false;
}
}
}
return true;
}
void _matrix4x4CompositionTests() {
test('compose4x4MatrixInCanvas', () async {
const double rotateAroundX = pi / 6; // 30 degrees
const double rotateAroundY = pi / 9; // 20 degrees
const int width = 150;
const int height = 150;
const ui.Color black = ui.Color.fromARGB(255, 0, 0, 0);
const ui.Color green = ui.Color.fromARGB(255, 0, 255, 0);
void paint(ui.Canvas canvas, CanvasCallback rotate) {
canvas.translate(width * 0.5, height * 0.5);
rotate(canvas);
const double width3 = width / 3.0;
const double width5 = width / 5.0;
const double width10 = width / 10.0;
canvas.drawRect(const ui.Rect.fromLTRB(-width3, -width3, width3, width3),
ui.Paint()..color = green);
canvas.drawRect(
const ui.Rect.fromLTRB(-width5, -width5, -width10, width5),
ui.Paint()..color = black);
canvas.drawRect(
const ui.Rect.fromLTRB(-width5, -width5, width5, -width10),
ui.Paint()..color = black);
}
final ui.Image incrementalMatrixImage = await toImage((ui.Canvas canvas) {
paint(canvas, (ui.Canvas canvas) {
final Matrix4 matrix = Matrix4.identity();
matrix.setEntry(3, 2, 0.001);
canvas.transform(matrix.toFloat64());
matrix.setRotationX(rotateAroundX);
canvas.transform(matrix.toFloat64());
matrix.setRotationY(rotateAroundY);
canvas.transform(matrix.toFloat64());
});
}, width, height);
final ui.Image combinedMatrixImage = await toImage((ui.Canvas canvas) {
paint(canvas, (ui.Canvas canvas) {
final Matrix4 matrix = Matrix4.identity();
matrix.setEntry(3, 2, 0.001);
matrix.rotate(kUnitX, rotateAroundX);
matrix.rotate(kUnitY, rotateAroundY);
canvas.transform(matrix.toFloat64());
});
}, width, height);
final bool areEqual =
await fuzzyCompareImages(incrementalMatrixImage, combinedMatrixImage);
expect(areEqual, true);
});
}
void _toSkRectTests() {
test('toSkRect', () {
expect(toSkRect(const ui.Rect.fromLTRB(1, 2, 3, 4)), <double>[1, 2, 3, 4]);
});
test('fromSkRect', () {
expect(fromSkRect(Float32List.fromList(<double>[1, 2, 3, 4])),
const ui.Rect.fromLTRB(1, 2, 3, 4));
});
test('toSkRRect', () {
expect(
toSkRRect(ui.RRect.fromLTRBAndCorners(
1,
2,
3,
4,
topLeft: const ui.Radius.elliptical(5, 6),
topRight: const ui.Radius.elliptical(7, 8),
bottomRight: const ui.Radius.elliptical(9, 10),
bottomLeft: const ui.Radius.elliptical(11, 12),
)),
<double>[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
);
});
}
SkPath _testClosedSkPath() {
return SkPath()
..moveTo(10, 10)
..lineTo(20, 10)
..lineTo(20, 20)
..lineTo(10, 20)
..close();
}
void _pathTests() {
late SkPath path;
setUp(() {
path = SkPath();
});
test('setFillType', () {
path.setFillType(canvasKit.FillType.Winding);
});
test('addArc', () {
path.addArc(
toSkRect(const ui.Rect.fromLTRB(10, 20, 30, 40)),
1,
5,
);
});
test('addOval', () {
path.addOval(
toSkRect(const ui.Rect.fromLTRB(10, 20, 30, 40)),
false,
1,
);
});
test('addPath', () {
path.addPath(_testClosedSkPath(), 1, 0, 0, 0, 1, 0, 0, 0, 0, false);
});
test('addPoly', () {
final SkFloat32List encodedPoints = toMallocedSkPoints(const <ui.Offset>[
ui.Offset.zero,
ui.Offset(10, 10),
]);
path.addPoly(encodedPoints.toTypedArray(), true);
free(encodedPoints);
});
test('addRRect', () {
final ui.RRect rrect = ui.RRect.fromRectAndRadius(
const ui.Rect.fromLTRB(10, 10, 20, 20),
const ui.Radius.circular(3),
);
path.addRRect(
toSkRRect(rrect),
false,
);
});
test('addRect', () {
path.addRect(toSkRect(const ui.Rect.fromLTRB(1, 2, 3, 4)));
});
test('arcTo', () {
path.arcToOval(
toSkRect(const ui.Rect.fromLTRB(1, 2, 3, 4)),
5,
40,
false,
);
});
test('overloaded arcTo (used for arcToPoint)', () {
path.arcToRotated(
1,
2,
3,
false,
true,
4,
5,
);
});
test('close', () {
_testClosedSkPath();
});
test('conicTo', () {
path.conicTo(1, 2, 3, 4, 5);
});
test('contains', () {
final SkPath testPath = _testClosedSkPath();
expect(testPath.contains(15, 15), isTrue);
expect(testPath.contains(100, 100), isFalse);
});
test('cubicTo', () {
path.cubicTo(1, 2, 3, 4, 5, 6);
});
test('getBounds', () {
final SkPath testPath = _testClosedSkPath();
final ui.Rect bounds = fromSkRect(testPath.getBounds());
expect(bounds, const ui.Rect.fromLTRB(10, 10, 20, 20));
});
test('lineTo', () {
path.lineTo(10, 10);
});
test('moveTo', () {
path.moveTo(10, 10);
});
test('quadTo', () {
path.quadTo(10, 10, 20, 20);
});
test('rArcTo', () {
path.rArcTo(
10,
20,
30,
false,
true,
40,
50,
);
});
test('rConicTo', () {
path.rConicTo(1, 2, 3, 4, 5);
});
test('rCubicTo', () {
path.rCubicTo(1, 2, 3, 4, 5, 6);
});
test('rLineTo', () {
path.rLineTo(10, 10);
});
test('rMoveTo', () {
path.rMoveTo(10, 10);
});
test('rQuadTo', () {
path.rQuadTo(10, 10, 20, 20);
});
test('reset', () {
final SkPath testPath = _testClosedSkPath();
expect(fromSkRect(testPath.getBounds()),
const ui.Rect.fromLTRB(10, 10, 20, 20));
testPath.reset();
expect(fromSkRect(testPath.getBounds()), ui.Rect.zero);
});
test('toSVGString', () {
expect(
_testClosedSkPath().toSVGString(), 'M10 10L20 10L20 20L10 20L10 10Z');
});
test('isEmpty', () {
expect(SkPath().isEmpty(), isTrue);
expect(_testClosedSkPath().isEmpty(), isFalse);
});
test('copy', () {
final SkPath original = _testClosedSkPath();
final SkPath copy = original.copy();
expect(fromSkRect(original.getBounds()), fromSkRect(copy.getBounds()));
});
test('transform', () {
path = _testClosedSkPath();
path.transform(2, 0, 10, 0, 2, 10, 0, 0, 0);
final ui.Rect transformedBounds = fromSkRect(path.getBounds());
expect(transformedBounds, const ui.Rect.fromLTRB(30, 30, 50, 50));
});
test('SkContourMeasureIter/SkContourMeasure', () {
final SkContourMeasureIter iter =
SkContourMeasureIter(_testClosedSkPath(), false, 1.0);
final SkContourMeasure measure1 = iter.next()!;
expect(measure1.length(), 40);
expect(measure1.getPosTan(5), Float32List.fromList(<double>[15, 10, 1, 0]));
expect(
measure1.getPosTan(15), Float32List.fromList(<double>[20, 15, 0, 1]));
expect(measure1.isClosed(), isTrue);
// Starting with a box path:
//
// 10 20
// 10 +-----------+
// | |
// | |
// | |
// | |
// | |
// 20 +-----------+
//
// Cut out the top-right quadrant:
//
// 10 15 20
// 10 +-----+=====+
// | ║+++++║
// | ║+++++║
// | +=====+ 15
// | |
// | |
// 20 +-----------+
final SkPath segment = measure1.getSegment(5, 15, true);
expect(fromSkRect(segment.getBounds()),
const ui.Rect.fromLTRB(15, 10, 20, 15));
final SkContourMeasure? measure2 = iter.next();
expect(measure2, isNull);
});
test('SkPath.toCmds and CanvasKit.Path.MakeFromCmds', () {
const ui.Rect rect = ui.Rect.fromLTRB(0, 0, 10, 10);
final SkPath path = SkPath();
path.addRect(toSkRect(rect));
expect(path.toCmds(), <num>[
0, 0, 0, // moveTo
1, 10, 0, // lineTo
1, 10, 10, // lineTo
1, 0, 10, // lineTo
5, // close
]);
final SkPath copy = canvasKit.Path.MakeFromCmds(path.toCmds());
expect(fromSkRect(copy.getBounds()), rect);
});
}
SkVertices _testVertices() {
return canvasKit.MakeVertices(
canvasKit.VertexMode.Triangles,
Float32List.fromList(<double>[0, 0, 10, 10, 0, 20]),
Float32List.fromList(<double>[0, 0, 10, 10, 0, 20]),
Uint32List.fromList(<int>[0xffff0000, 0xff00ff00, 0xff0000ff]),
Uint16List.fromList(<int>[0, 1, 2]),
);
}
void _skVerticesTests() {
test('SkVertices', () {
expect(_testVertices(), isNotNull);
});
}
void _pictureTests() {
late SkPicture picture;
setUp(() {
final SkPictureRecorder recorder = SkPictureRecorder();
final SkCanvas canvas = recorder.beginRecording(toSkRect(ui.Rect.largest));
canvas.drawRect(toSkRect(const ui.Rect.fromLTRB(20, 30, 40, 50)),
SkPaint()..setColorInt(0xffff00ff));
picture = recorder.finishRecordingAsPicture();
});
test('cullRect', () {
expect(
fromSkRect(picture.cullRect()), const ui.Rect.fromLTRB(20, 30, 40, 50));
});
test('approximateBytesUsed', () {
expect(picture.approximateBytesUsed() > 0, isTrue);
});
}
void _canvasTests() {
late SkPictureRecorder recorder;
late SkCanvas canvas;
setUp(() {
recorder = SkPictureRecorder();
canvas = recorder
.beginRecording(toSkRect(const ui.Rect.fromLTRB(0, 0, 100, 100)));
});
tearDown(() {
expect(recorder.finishRecordingAsPicture(), isNotNull);
});
test('save/getSaveCount/restore/restoreToCount', () {
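    // save() returns the save count prior to the call; a fresh canvas starts
    // at a count of 1.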
expect(canvas.save(), 1);
expect(canvas.save(), 2);
expect(canvas.save(), 3);
expect(canvas.save(), 4);
expect(canvas.getSaveCount(), 5);
canvas.restoreToCount(2);
expect(canvas.getSaveCount(), 2);
canvas.restore();
expect(canvas.getSaveCount(), 1);
});
test('saveLayer', () {
canvas.saveLayer(
SkPaint(),
toSkRect(const ui.Rect.fromLTRB(0, 0, 100, 100)),
null,
null,
);
});
test('saveLayer without bounds', () {
canvas.saveLayer(SkPaint(), null, null, null);
});
test('saveLayer with filter', () {
canvas.saveLayer(
SkPaint(),
toSkRect(const ui.Rect.fromLTRB(0, 0, 100, 100)),
canvasKit.ImageFilter.MakeBlur(1, 2, canvasKit.TileMode.Repeat, null),
0,
);
});
test('clear', () {
canvas.clear(Float32List.fromList(<double>[0, 0, 0, 0]));
});
test('clipPath', () {
canvas.clipPath(
SkPath()
..moveTo(10.9, 10.9)
..lineTo(19.1, 10.9)
..lineTo(19.1, 19.1)
..lineTo(10.9, 19.1),
canvasKit.ClipOp.Intersect,
true,
);
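    // getDeviceClipBounds() reports the clip rounded out to whole device pixels.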
expect(canvas.getDeviceClipBounds(), <int>[10, 10, 20, 20]);
});
test('clipRRect', () {
canvas.clipRRect(
Float32List.fromList(<double>[0.9, 0.9, 99.1, 99.1, 1, 2, 3, 4, 5, 6, 7, 8]),
canvasKit.ClipOp.Intersect,
true,
);
expect(canvas.getDeviceClipBounds(), <int>[0, 0, 100, 100]);
});
test('clipRect', () {
canvas.clipRect(
Float32List.fromList(<double>[0.9, 0.9, 99.1, 99.1]),
canvasKit.ClipOp.Intersect,
true,
);
expect(canvas.getDeviceClipBounds(), <int>[0, 0, 100, 100]);
});
test('drawArc', () {
canvas.drawArc(
Float32List.fromList(<double>[0, 0, 100, 50]),
0,
100,
true,
SkPaint(),
);
});
test('drawAtlas', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawAtlas(
image.makeImageAtCurrentFrame(),
Float32List.fromList(<double>[0, 0, 1, 1]),
Float32List.fromList(<double>[1, 0, 2, 3]),
SkPaint(),
canvasKit.BlendMode.SrcOver,
Uint32List.fromList(<int>[0xff000000, 0xffffffff]),
);
});
test('drawCircle', () {
canvas.drawCircle(1, 2, 3, SkPaint());
});
test('drawColorInt', () {
canvas.drawColorInt(0xFFFFFFFF, canvasKit.BlendMode.SoftLight);
});
test('drawDRRect', () {
canvas.drawDRRect(
Float32List.fromList(<double>[0, 0, 100, 100, 1, 2, 3, 4, 5, 6, 7, 8]),
Float32List.fromList(<double>[20, 20, 80, 80, 1, 2, 3, 4, 5, 6, 7, 8]),
SkPaint(),
);
});
test('drawImageOptions', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawImageOptions(
image.makeImageAtCurrentFrame(),
10,
20,
canvasKit.FilterMode.Linear,
canvasKit.MipmapMode.None,
SkPaint(),
);
});
test('drawImageCubic', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawImageCubic(
image.makeImageAtCurrentFrame(),
10,
20,
0.3,
0.3,
SkPaint(),
);
});
test('drawImageRectOptions', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawImageRectOptions(
image.makeImageAtCurrentFrame(),
Float32List.fromList(<double>[0, 0, 1, 1]),
Float32List.fromList(<double>[0, 0, 1, 1]),
canvasKit.FilterMode.Linear,
canvasKit.MipmapMode.None,
SkPaint(),
);
});
test('drawImageRectCubic', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawImageRectCubic(
image.makeImageAtCurrentFrame(),
Float32List.fromList(<double>[0, 0, 1, 1]),
Float32List.fromList(<double>[0, 0, 1, 1]),
0.3,
0.3,
SkPaint(),
);
});
test('drawImageNine', () {
final SkAnimatedImage image =
canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!;
canvas.drawImageNine(
image.makeImageAtCurrentFrame(),
Float32List.fromList(<double>[0, 0, 1, 1]),
Float32List.fromList(<double>[0, 0, 1, 1]),
canvasKit.FilterMode.Linear,
SkPaint(),
);
});
test('drawLine', () {
canvas.drawLine(0, 1, 2, 3, SkPaint());
});
test('drawOval', () {
canvas.drawOval(Float32List.fromList(<double>[0, 0, 1, 1]), SkPaint());
});
test('drawPaint', () {
canvas.drawPaint(SkPaint());
});
test('drawPath', () {
canvas.drawPath(
_testClosedSkPath(),
SkPaint(),
);
});
test('drawPoints', () {
canvas.drawPoints(
canvasKit.PointMode.Lines,
Float32List.fromList(<double>[0, 0, 10, 10, 0, 10]),
SkPaint(),
);
});
test('drawRRect', () {
canvas.drawRRect(
Float32List.fromList(<double>[0, 0, 100, 100, 1, 2, 3, 4, 5, 6, 7, 8]),
SkPaint(),
);
});
test('drawRect', () {
canvas.drawRect(
Float32List.fromList(<double>[0, 0, 100, 100]),
SkPaint(),
);
});
test('drawShadow', () {
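    // Exercise drawShadow() with the transparent-occluder flag (0x01) both set
    // and cleared.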
for (final int flags in const <int>[0x01, 0x00]) {
const double devicePixelRatio = 2.0;
const double elevation = 4.0;
const double ambientAlpha = 0.039;
const double spotAlpha = 0.25;
final SkPath path = _testClosedSkPath();
final ui.Rect bounds = fromSkRect(path.getBounds());
final double shadowX = (bounds.left + bounds.right) / 2.0;
final double shadowY = bounds.top - 600.0;
const ui.Color color = ui.Color(0xAABBCCDD);
final ui.Color inAmbient =
color.withAlpha((color.alpha * ambientAlpha).round());
final ui.Color inSpot =
color.withAlpha((color.alpha * spotAlpha).round());
final SkTonalColors inTonalColors = SkTonalColors(
ambient: makeFreshSkColor(inAmbient),
spot: makeFreshSkColor(inSpot),
);
final SkTonalColors tonalColors =
canvasKit.computeTonalColors(inTonalColors);
canvas.drawShadow(
path,
Float32List(3)..[2] = devicePixelRatio * elevation,
Float32List(3)
..[0] = shadowX
..[1] = shadowY
..[2] = devicePixelRatio * kLightHeight,
devicePixelRatio * kLightRadius,
tonalColors.ambient,
tonalColors.spot,
flags.toDouble(),
);
}
});
test('drawVertices', () {
canvas.drawVertices(
_testVertices(),
canvasKit.BlendMode.SrcOver,
SkPaint(),
);
});
test('rotate', () {
canvas.rotate(90, 10, 20);
expect(canvas.getLocalToDevice(), <double>[
0, -1, 0, 30, // tx = 10 - (-20) == 30
1, 0, 0, 10, // ty = 20 - 10 == 10
0, 0, 1, 0,
0, 0, 0, 1,
]);
});
test('scale', () {
canvas.scale(2, 3);
expect(canvas.getLocalToDevice(), <double>[
2, 0, 0, 0,
0, 3, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
});
test('skew', () {
canvas.skew(4, 5);
expect(canvas.getLocalToDevice(), <double>[
1, 4, 0, 0,
5, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
});
test('concat', () {
canvas.concat(toSkM44FromFloat32(Matrix4.identity().storage));
expect(canvas.getLocalToDevice(), <double>[
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
canvas.concat(Float32List.fromList(<double>[
11, 12, 13, 14,
21, 22, 23, 24,
31, 32, 33, 34,
41, 42, 43, 44,
]));
expect(canvas.getLocalToDevice(), <double>[
11, 12, 13, 14,
21, 22, 23, 24,
31, 32, 33, 34,
41, 42, 43, 44,
]);
});
test('translate', () {
canvas.translate(4, 5);
expect(canvas.getLocalToDevice(), <double>[
1, 0, 0, 4,
0, 1, 0, 5,
0, 0, 1, 0,
0, 0, 0, 1,
]);
});
test('drawPicture', () {
final SkPictureRecorder otherRecorder = SkPictureRecorder();
final SkCanvas otherCanvas = otherRecorder
.beginRecording(Float32List.fromList(<double>[0, 0, 100, 100]));
otherCanvas.drawLine(0, 0, 10, 10, SkPaint());
canvas.drawPicture(otherRecorder.finishRecordingAsPicture());
});
test('drawParagraph', () {
final CkParagraphBuilder builder = CkParagraphBuilder(
CkParagraphStyle(),
);
builder.addText('Hello');
final CkParagraph paragraph = builder.build();
paragraph.layout(const ui.ParagraphConstraints(width: 100));
canvas.drawParagraph(
paragraph.skiaObject,
10,
20,
);
});
  test('Paragraph converts caret position to character position', () {
final CkParagraphBuilder builder = CkParagraphBuilder(
CkParagraphStyle(),
);
builder.addText('Hello there');
final CkParagraph paragraph = builder.build();
paragraph.layout(const ui.ParagraphConstraints(width: 100));
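    // Offset 5 is the space after 'Hello': upstream affinity resolves to the
    // preceding word, while the default downstream affinity resolves to the
    // space itself.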
ui.TextRange range = paragraph.getWordBoundary(const ui.TextPosition(offset: 5, affinity: ui.TextAffinity.upstream));
expect(range.start, 0);
expect(range.end, 5);
range = paragraph.getWordBoundary(const ui.TextPosition(offset: 5));
expect(range.start, 5);
expect(range.end, 6);
});
test('Paragraph dispose', () {
final CkParagraphBuilder builder = CkParagraphBuilder(
CkParagraphStyle(),
);
builder.addText('Hello');
final CkParagraph paragraph = builder.build();
paragraph.dispose();
expect(paragraph.debugDisposed, true);
});
test('toImage.toByteData', () async {
final SkPictureRecorder otherRecorder = SkPictureRecorder();
final SkCanvas otherCanvas = otherRecorder
.beginRecording(Float32List.fromList(<double>[0, 0, 1, 1]));
otherCanvas.drawRect(
Float32List.fromList(<double>[0, 0, 1, 1]),
SkPaint()..setColorInt(0xAAFFFFFF),
);
final CkPicture picture =
CkPicture(otherRecorder.finishRecordingAsPicture());
final CkImage image = await picture.toImage(1, 1) as CkImage;
final ByteData rawData =
await image.toByteData();
expect(rawData.lengthInBytes, greaterThan(0));
expect(
rawData.buffer.asUint32List(),
<int>[0xAAAAAAAA],
);
final ByteData rawStraightData =
await image.toByteData(format: ui.ImageByteFormat.rawStraightRgba);
expect(rawStraightData.lengthInBytes, greaterThan(0));
expect(
rawStraightData.buffer.asUint32List(),
<int>[0xAAFFFFFF],
);
final ByteData pngData =
await image.toByteData(format: ui.ImageByteFormat.png);
expect(pngData.lengthInBytes, greaterThan(0));
});
}
void _textStyleTests() {
test('SkTextDecorationStyle mapping is correct', () {
expect(canvasKit.DecorationStyle.Solid.value,
ui.TextDecorationStyle.solid.index);
expect(canvasKit.DecorationStyle.Double.value,
ui.TextDecorationStyle.double.index);
expect(canvasKit.DecorationStyle.Dotted.value,
ui.TextDecorationStyle.dotted.index);
expect(canvasKit.DecorationStyle.Dashed.value,
ui.TextDecorationStyle.dashed.index);
expect(canvasKit.DecorationStyle.Wavy.value,
ui.TextDecorationStyle.wavy.index);
});
test('ui.TextDecorationStyle converts to SkTextDecorationStyle', () {
for (final ui.TextDecorationStyle decorationStyle
in ui.TextDecorationStyle.values) {
expect(toSkTextDecorationStyle(decorationStyle).value,
decorationStyle.index);
}
});
test('SkTextBaseline mapping is correct', () {
expect(canvasKit.TextBaseline.Alphabetic.value,
ui.TextBaseline.alphabetic.index);
expect(canvasKit.TextBaseline.Ideographic.value,
ui.TextBaseline.ideographic.index);
});
test('ui.TextBaseline converts to SkTextBaseline', () {
for (final ui.TextBaseline textBaseline in ui.TextBaseline.values) {
expect(toSkTextBaseline(textBaseline).value, textBaseline.index);
}
});
test('SkPlaceholderAlignment mapping is correct', () {
expect(canvasKit.PlaceholderAlignment.Baseline.value,
ui.PlaceholderAlignment.baseline.index);
expect(canvasKit.PlaceholderAlignment.AboveBaseline.value,
ui.PlaceholderAlignment.aboveBaseline.index);
expect(canvasKit.PlaceholderAlignment.BelowBaseline.value,
ui.PlaceholderAlignment.belowBaseline.index);
expect(canvasKit.PlaceholderAlignment.Top.value,
ui.PlaceholderAlignment.top.index);
expect(canvasKit.PlaceholderAlignment.Bottom.value,
ui.PlaceholderAlignment.bottom.index);
expect(canvasKit.PlaceholderAlignment.Middle.value,
ui.PlaceholderAlignment.middle.index);
});
test('ui.PlaceholderAlignment converts to SkPlaceholderAlignment', () {
for (final ui.PlaceholderAlignment placeholderAlignment
in ui.PlaceholderAlignment.values) {
expect(toSkPlaceholderAlignment(placeholderAlignment).value,
placeholderAlignment.index);
}
});
}
void _paragraphTests() {
// This test is just a kitchen sink that blasts CanvasKit with all paragraph
// properties all at once, making sure CanvasKit doesn't choke on anything.
// In particular, this tests that our JS bindings are correct, such as that
// arguments are of acceptable types and passed in the correct order.
test('kitchensink', () async {
final SkParagraphStyleProperties props = SkParagraphStyleProperties();
props.textAlign = canvasKit.TextAlign.Left;
props.textDirection = canvasKit.TextDirection.RTL;
props.heightMultiplier = 3;
props.textHeightBehavior = canvasKit.TextHeightBehavior.All;
props.maxLines = 4;
props.ellipsis = '___';
props.textStyle = SkTextStyleProperties()
..backgroundColor = Float32List.fromList(<double>[0.2, 0, 0, 0.5])
..color = Float32List.fromList(<double>[0, 1, 0, 1])
..foregroundColor = Float32List.fromList(<double>[1, 0, 1, 1])
..decoration = 0x2
..decorationThickness = 2.0
..decorationColor = Float32List.fromList(<double>[13, 14, 15, 16])
..decorationStyle = canvasKit.DecorationStyle.Dotted
..textBaseline = canvasKit.TextBaseline.Ideographic
..fontSize = 48
..letterSpacing = 5
..wordSpacing = 10
..heightMultiplier = 1.3
..halfLeading = true
..locale = 'en_CA'
..fontFamilies = <String>['Roboto', 'serif']
..fontStyle = (SkFontStyle()
..slant = canvasKit.FontSlant.Upright
..weight = canvasKit.FontWeight.Normal)
..shadows = <SkTextShadow>[]
..fontFeatures = <SkFontFeature>[
SkFontFeature()
..name = 'pnum'
..value = 1,
SkFontFeature()
..name = 'tnum'
..value = 1,
]
;
props.strutStyle = SkStrutStyleProperties()
..fontFamilies = <String>['Roboto', 'Noto']
..fontStyle = (SkFontStyle()
..slant = canvasKit.FontSlant.Italic
..weight = canvasKit.FontWeight.Bold)
..fontSize = 72
..heightMultiplier = 1.5
..halfLeading = false
..leading = 0
..strutEnabled = true
..forceStrutHeight = false;
final SkParagraphStyle paragraphStyle = canvasKit.ParagraphStyle(props);
final SkParagraphBuilder builder = canvasKit.ParagraphBuilder.MakeFromFontCollection(
paragraphStyle,
CanvasKitRenderer.instance.fontCollection.skFontCollection,
);
builder.addText('Hello');
builder.addPlaceholder(
50,
25,
canvasKit.PlaceholderAlignment.Middle,
canvasKit.TextBaseline.Ideographic,
4.0,
);
builder.pushStyle(canvasKit.TextStyle(SkTextStyleProperties()
..color = Float32List.fromList(<double>[1, 0, 0, 1])
..fontSize = 24
..fontFamilies = <String>['Roboto', 'serif']
));
builder.addText('World');
builder.pop();
builder.pushPaintStyle(
canvasKit.TextStyle(SkTextStyleProperties()
..color = Float32List.fromList(<double>[1, 0, 0, 1])
..fontSize = 60
..fontFamilies = <String>['Roboto', 'serif']
),
SkPaint()..setColorInt(0xFF0000FF),
SkPaint()..setColorInt(0xFFFF0000),
);
builder.addText('!');
builder.pop();
builder.pushStyle(
canvasKit.TextStyle(SkTextStyleProperties()..halfLeading = true));
builder.pop();
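    // Some CanvasKit builds ship without ICU data and rely on the client to
    // supply text segmentation (e.g. via the browser's segmentation APIs).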
if (canvasKit.ParagraphBuilder.RequiresClientICU()) {
injectClientICU(builder);
}
final SkParagraph paragraph = builder.build();
paragraph.layout(500);
final DomCanvasElement canvas = createDomCanvasElement(
width: 400,
height: 160,
);
domDocument.body!.append(canvas);
// TODO(yjbanov): WebGL screenshot tests do not work on Firefox - https://github.com/flutter/flutter/issues/109265
if (!isFirefox) {
final SkSurface surface = canvasKit.MakeWebGLCanvasSurface(canvas);
final SkCanvas skCanvas = surface.getCanvas();
skCanvas.drawColorInt(0xFFCCCCCC, toSkBlendMode(ui.BlendMode.srcOver));
skCanvas.drawParagraph(paragraph, 20, 20);
skCanvas.drawRect(
Float32List.fromList(<double>[20, 20, 20 + paragraph.getMaxIntrinsicWidth(), 20 + paragraph.getHeight()]),
SkPaint()
..setStyle(toSkPaintStyle(ui.PaintingStyle.stroke))
..setStrokeWidth(1)
..setColorInt(0xFF00FF00),
);
surface.flush();
await matchGoldenFile(
'paragraph_kitchen_sink.png',
region: const ui.Rect.fromLTRB(0, 0, 400, 160),
);
}
void expectAlmost(double actual, double expected) {
expect(actual, within<double>(distance: actual / 100, from: expected));
}
expectAlmost(paragraph.getAlphabeticBaseline(), 85.5);
expect(paragraph.didExceedMaxLines(), isFalse);
expectAlmost(paragraph.getHeight(), 108);
expectAlmost(paragraph.getIdeographicBaseline(), 108);
expectAlmost(paragraph.getLongestLine(), 263);
expectAlmost(paragraph.getMaxIntrinsicWidth(), 263);
expectAlmost(paragraph.getMinIntrinsicWidth(), 135);
expectAlmost(paragraph.getMaxWidth(), 500);
final SkRectWithDirection rectWithDirection =
paragraph.getRectsForRange(
1,
3,
canvasKit.RectHeightStyle.Tight,
canvasKit.RectWidthStyle.Max).single;
expect(
rectWithDirection.rect,
hasLength(4),
);
expect(paragraph.getRectsForPlaceholders(), hasLength(1));
expect(paragraph.getLineMetrics(), hasLength(1));
final SkLineMetrics lineMetrics =
paragraph.getLineMetrics().single;
expectAlmost(lineMetrics.ascent, 55.6);
expectAlmost(lineMetrics.descent, 14.8);
expect(lineMetrics.isHardBreak, isTrue);
expectAlmost(lineMetrics.baseline, 85.5);
expectAlmost(lineMetrics.height, 108);
expectAlmost(lineMetrics.left, 2.5);
expectAlmost(lineMetrics.width, 263);
expect(lineMetrics.lineNumber, 0);
expect(
paragraph.getGlyphPositionAtCoordinate(5, 5).affinity,
canvasKit.Affinity.Upstream,
);
// "Hello"
for (int i = 0; i < 5; i++) {
expect(paragraph.getWordBoundary(i.toDouble()).start, 0);
expect(paragraph.getWordBoundary(i.toDouble()).end, 5);
}
// Placeholder
expect(paragraph.getWordBoundary(5).start, 5);
expect(paragraph.getWordBoundary(5).end, 6);
// "World"
for (int i = 6; i < 11; i++) {
expect(paragraph.getWordBoundary(i.toDouble()).start, 6);
expect(paragraph.getWordBoundary(i.toDouble()).end, 11);
}
// "!"
expect(paragraph.getWordBoundary(11).start, 11);
expect(paragraph.getWordBoundary(11).end, 12);
paragraph.delete();
});
test('RectHeightStyle', () {
final SkParagraphStyleProperties props = SkParagraphStyleProperties();
props.heightMultiplier = 3;
props.textAlign = canvasKit.TextAlign.Start;
props.textDirection = canvasKit.TextDirection.LTR;
props.textStyle = SkTextStyleProperties()
..fontSize = 25
..fontFamilies = <String>['Roboto']
..fontStyle = (SkFontStyle()..weight = canvasKit.FontWeight.Normal);
props.strutStyle = SkStrutStyleProperties()
..strutEnabled = true
..forceStrutHeight = true
..fontSize = 25
..fontFamilies = <String>['Roboto']
..heightMultiplier = 3
..fontStyle = (SkFontStyle()..weight = canvasKit.FontWeight.Normal);
final SkParagraphStyle paragraphStyle = canvasKit.ParagraphStyle(props);
final SkParagraphBuilder builder =
canvasKit.ParagraphBuilder.MakeFromFontCollection(
paragraphStyle,
CanvasKitRenderer.instance.fontCollection.skFontCollection,
);
builder.addText('hello');
if (canvasKit.ParagraphBuilder.RequiresClientICU()) {
injectClientICU(builder);
}
final SkParagraph paragraph = builder.build();
paragraph.layout(500);
final List<SkRectWithDirection> rects = paragraph.getRectsForRange(
0,
1,
canvasKit.RectHeightStyle.Strut,
canvasKit.RectWidthStyle.Tight,
);
expect(rects.length, 1);
final SkRectWithDirection rect = rects.first;
expect(rect.rect, <double>[0, 0, 13.770000457763672, 75]);
});
test('TextHeightBehavior', () {
expect(
toSkTextHeightBehavior(const ui.TextHeightBehavior()),
canvasKit.TextHeightBehavior.All,
);
expect(
toSkTextHeightBehavior(const ui.TextHeightBehavior(
applyHeightToFirstAscent: false,
)),
canvasKit.TextHeightBehavior.DisableFirstAscent,
);
expect(
toSkTextHeightBehavior(const ui.TextHeightBehavior(
applyHeightToLastDescent: false,
)),
canvasKit.TextHeightBehavior.DisableLastDescent,
);
expect(
toSkTextHeightBehavior(const ui.TextHeightBehavior(
applyHeightToFirstAscent: false,
applyHeightToLastDescent: false,
)),
canvasKit.TextHeightBehavior.DisableAll,
);
});
test('MakeOnScreenGLSurface test', () {
final DomCanvasElement canvas = createDomCanvasElement(
width: 100,
height: 100,
);
final WebGLContext gl = canvas.getGlContext(webGLVersion);
final int sampleCount = gl.getParameter(gl.samples);
final int stencilBits = gl.getParameter(gl.stencilBits);
final double glContext = canvasKit.GetWebGLContext(
canvas,
SkWebGLContextOptions(
antialias: 0,
majorVersion: webGLVersion.toDouble(),
),
);
final SkGrContext grContext = canvasKit.MakeGrContext(glContext);
final SkSurface? skSurface = canvasKit.MakeOnScreenGLSurface(
grContext,
100,
100,
SkColorSpaceSRGB,
sampleCount,
stencilBits
);
expect(skSurface, isNotNull);
}, skip: isFirefox); // Intended: Headless firefox has no webgl support https://github.com/flutter/flutter/issues/109265
test('MakeRenderTarget test', () {
final DomCanvasElement canvas = createDomCanvasElement(
width: 100,
height: 100,
);
final int glContext = canvasKit.GetWebGLContext(
canvas,
SkWebGLContextOptions(
antialias: 0,
majorVersion: webGLVersion.toDouble(),
),
).toInt();
final SkGrContext grContext = canvasKit.MakeGrContext(glContext.toDouble());
final SkSurface? surface = canvasKit.MakeRenderTarget(grContext, 1, 1);
expect(surface, isNotNull);
}, skip: isFirefox); // Intended: Headless firefox has no webgl support https://github.com/flutter/flutter/issues/109265
group('getCanvasKitJsFileNames', () {
JSAny? oldV8BreakIterator = v8BreakIterator;
JSAny? oldIntlSegmenter = intlSegmenter;
setUp(() {
oldV8BreakIterator = v8BreakIterator;
oldIntlSegmenter = intlSegmenter;
});
tearDown(() {
v8BreakIterator = oldV8BreakIterator;
intlSegmenter = oldIntlSegmenter;
debugResetBrowserSupportsImageDecoder();
});
test('in Chromium-based browsers', () {
v8BreakIterator = Object().toJSBox; // Any non-null value.
intlSegmenter = Object().toJSBox; // Any non-null value.
browserSupportsImageDecoder = true;
expect(getCanvasKitJsFileNames(CanvasKitVariant.full), <String>['canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.chromium), <String>['chromium/canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.auto), <String>[
'chromium/canvaskit.js',
'canvaskit.js',
]);
});
test('in older versions of Chromium-based browsers', () {
v8BreakIterator = Object().toJSBox; // Any non-null value.
intlSegmenter = null; // Older versions of Chromium didn't have the Intl.Segmenter API.
browserSupportsImageDecoder = true;
expect(getCanvasKitJsFileNames(CanvasKitVariant.full), <String>['canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.chromium), <String>['chromium/canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.auto), <String>['canvaskit.js']);
});
test('in other browsers', () {
intlSegmenter = Object().toJSBox; // Any non-null value.
v8BreakIterator = null;
browserSupportsImageDecoder = true;
expect(getCanvasKitJsFileNames(CanvasKitVariant.full), <String>['canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.chromium), <String>['chromium/canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.auto), <String>['canvaskit.js']);
v8BreakIterator = Object().toJSBox;
browserSupportsImageDecoder = false;
// TODO(mdebbar): we don't check image codecs for now.
// https://github.com/flutter/flutter/issues/122331
expect(getCanvasKitJsFileNames(CanvasKitVariant.full), <String>['canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.chromium), <String>['chromium/canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.auto), <String>['chromium/canvaskit.js', 'canvaskit.js']);
v8BreakIterator = null;
browserSupportsImageDecoder = false;
expect(getCanvasKitJsFileNames(CanvasKitVariant.full), <String>['canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.chromium), <String>['chromium/canvaskit.js']);
expect(getCanvasKitJsFileNames(CanvasKitVariant.auto), <String>['canvaskit.js']);
});
});
test('respects actual location of canvaskit files', () {
expect(
canvasKitWasmModuleUrl('canvaskit.wasm', 'https://example.com/'),
'https://example.com/canvaskit.wasm',
);
expect(
canvasKitWasmModuleUrl('canvaskit.wasm', 'http://localhost:1234/'),
'http://localhost:1234/canvaskit.wasm',
);
expect(
canvasKitWasmModuleUrl('canvaskit.wasm', 'http://localhost:1234/foo/'),
'http://localhost:1234/foo/canvaskit.wasm',
);
});
test('SkObjectFinalizationRegistry', () {
// There's no reliable way to test the actual functionality of
// FinalizationRegistry because it depends on GC, which cannot be controlled,
    // so the test simply checks that a FinalizationRegistry can be constructed
// and its `register` method can be called.
final DomFinalizationRegistry registry = DomFinalizationRegistry((String arg) {}.toJS);
registry.register(Object().toJSWrapper, Object().toJSWrapper);
});
}
@JS('window.Intl.v8BreakIterator')
external JSAny? get v8BreakIterator;
@JS('window.Intl.v8BreakIterator')
external set v8BreakIterator(JSAny? x);
@JS('window.Intl.Segmenter')
external JSAny? get intlSegmenter;
@JS('window.Intl.Segmenter')
external set intlSegmenter(JSAny? x);
| engine/lib/web_ui/test/canvaskit/canvaskit_api_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/canvaskit_api_test.dart",
"repo_id": "engine",
"token_count": 24840
} | 306 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'common.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
group('CanvasKit', () {
setUpCanvasKitTest(withImplicitView: true);
// Regression test for https://github.com/flutter/flutter/issues/63715
test('TransformLayer prerolls correctly', () async {
final CkPicture picture =
paintPicture(const ui.Rect.fromLTRB(0, 0, 60, 60), (CkCanvas canvas) {
canvas.drawRect(const ui.Rect.fromLTRB(0, 0, 60, 60),
CkPaint()..style = ui.PaintingStyle.fill);
});
final LayerSceneBuilder sb = LayerSceneBuilder();
sb.pushClipRect(const ui.Rect.fromLTRB(15, 15, 30, 30));
// Intentionally use a perspective transform, which triggered the
// https://github.com/flutter/flutter/issues/63715 bug.
sb.pushTransform(Float64List.fromList(
Matrix4.identity().storage..[15] = 2,
));
sb.addPicture(ui.Offset.zero, picture);
final LayerScene scene = sb.build();
final LayerTree layerTree = scene.layerTree;
await renderScene(scene);
final ClipRectEngineLayer clipRect =
layerTree.rootLayer.debugLayers.single as ClipRectEngineLayer;
expect(clipRect.paintBounds, const ui.Rect.fromLTRB(15, 15, 30, 30));
final TransformEngineLayer transform =
clipRect.debugLayers.single as TransformEngineLayer;
expect(transform.paintBounds, const ui.Rect.fromLTRB(0, 0, 30, 30));
});
test('can push a leaf layer without a container layer', () async {
final CkPictureRecorder recorder = CkPictureRecorder();
recorder.beginRecording(ui.Rect.zero);
LayerSceneBuilder().addPicture(ui.Offset.zero, recorder.endRecording());
});
test('null ViewEmbedder with PlatformView', () async {
final LayerSceneBuilder sb = LayerSceneBuilder();
const ui.Rect kDefaultRegion = ui.Rect.fromLTRB(0, 0, 200, 200);
await createPlatformView(0, 'test-platform-view');
sb.pushOffset(0, 0);
sb.addPlatformView(0, width: 10, height: 10);
sb.pushOffset(0, 0);
final LayerScene layerScene = sb.build();
final ui.Image testImage = await layerScene.toImage(100, 100);
final CkPictureRecorder recorder = CkPictureRecorder();
final CkCanvas canvas = recorder.beginRecording(kDefaultRegion);
canvas.drawImage(testImage as CkImage, ui.Offset.zero, CkPaint());
await matchPictureGolden(
'canvaskit_picture.png',
recorder.endRecording(),
region: kDefaultRegion,
);
});
test('ImageFilter layer applies matrix in preroll', () async {
final CkPicture picture = paintPicture(
const ui.Rect.fromLTRB(0, 0, 100, 100), (CkCanvas canvas) {
canvas.drawRect(const ui.Rect.fromLTRB(0, 0, 100, 100),
CkPaint()..style = ui.PaintingStyle.fill);
});
final LayerSceneBuilder sb = LayerSceneBuilder();
sb.pushImageFilter(
ui.ImageFilter.matrix(
(Matrix4.identity()
..scale(0.5, 0.5)
..translate(20))
.toFloat64(),
),
);
sb.addPicture(ui.Offset.zero, picture);
final LayerScene scene = sb.build();
final LayerTree layerTree = scene.layerTree;
await renderScene(scene);
final ImageFilterEngineLayer imageFilterLayer =
layerTree.rootLayer.debugLayers.single as ImageFilterEngineLayer;
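      // The filter matrix offsets the 100x100 picture by 20 in x and then
      // scales it by 0.5, so the prerolled bounds are (10, 0) .. (60, 50).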
expect(
imageFilterLayer.paintBounds, const ui.Rect.fromLTRB(10, 0, 60, 50));
});
test('Opacity layer works correctly with Scene.toImage', () async {
// This is a regression test for https://github.com/flutter/flutter/issues/138009
final CkPicture picture = paintPicture(
const ui.Rect.fromLTRB(0, 0, 100, 100), (CkCanvas canvas) {
canvas.drawRect(const ui.Rect.fromLTRB(0, 0, 100, 100),
CkPaint()..style = ui.PaintingStyle.fill);
});
final LayerSceneBuilder sb = LayerSceneBuilder();
sb.pushTransform(Matrix4.identity().toFloat64());
sb.pushOpacity(97, offset: const ui.Offset(20, 20));
sb.addPicture(ui.Offset.zero, picture);
final LayerScene scene = sb.build();
final ui.Image testImage = await scene.toImage(200, 200);
final CkPictureRecorder recorder = CkPictureRecorder();
final CkCanvas canvas =
recorder.beginRecording(const ui.Rect.fromLTRB(0, 0, 200, 200));
canvas.drawImage(testImage as CkImage, ui.Offset.zero, CkPaint());
await matchPictureGolden(
'canvaskit_scene_toimage_opacity_layer.png',
recorder.endRecording(),
region: const ui.Rect.fromLTRB(0, 0, 200, 200),
);
});
});
}
| engine/lib/web_ui/test/canvaskit/layer_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/layer_test.dart",
"repo_id": "engine",
"token_count": 2044
} | 307 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_util' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'common.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
group('CanvasKit', () {
setUpCanvasKitTest();
setUp(() {
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(1.0);
});
test('Surface allocates canvases efficiently', () {
final Surface surface = Surface();
final CkSurface originalSurface =
surface.acquireFrame(const ui.Size(9, 19)).skiaSurface;
final DomOffscreenCanvas original = surface.debugOffscreenCanvas!;
// Expect exact requested dimensions.
expect(original.width, 9);
expect(original.height, 19);
expect(originalSurface.width(), 9);
expect(originalSurface.height(), 19);
// Shrinking reuses the existing canvas but translates it so
// Skia renders into the visible area.
final CkSurface shrunkSurface =
surface.acquireFrame(const ui.Size(5, 15)).skiaSurface;
final DomOffscreenCanvas shrunk = surface.debugOffscreenCanvas!;
expect(shrunk, same(original));
expect(shrunkSurface, isNot(same(originalSurface)));
expect(shrunkSurface.width(), 5);
expect(shrunkSurface.height(), 15);
// The first increase will allocate a new surface, but will overallocate
// by 40% to accommodate future increases.
final CkSurface firstIncreaseSurface =
surface.acquireFrame(const ui.Size(10, 20)).skiaSurface;
final DomOffscreenCanvas firstIncrease = surface.debugOffscreenCanvas!;
expect(firstIncrease, same(original));
expect(firstIncreaseSurface, isNot(same(shrunkSurface)));
// Expect overallocated dimensions
expect(firstIncrease.width, 14);
expect(firstIncrease.height, 28);
expect(firstIncreaseSurface.width(), 10);
expect(firstIncreaseSurface.height(), 20);
// Subsequent increases within 40% reuse the old canvas.
final CkSurface secondIncreaseSurface =
surface.acquireFrame(const ui.Size(11, 22)).skiaSurface;
final DomOffscreenCanvas secondIncrease = surface.debugOffscreenCanvas!;
expect(secondIncrease, same(firstIncrease));
expect(secondIncreaseSurface, isNot(same(firstIncreaseSurface)));
expect(secondIncreaseSurface.width(), 11);
expect(secondIncreaseSurface.height(), 22);
// Increases beyond the 40% limit will cause a new allocation.
final CkSurface hugeSurface =
surface.acquireFrame(const ui.Size(20, 40)).skiaSurface;
final DomOffscreenCanvas huge = surface.debugOffscreenCanvas!;
expect(huge, same(secondIncrease));
expect(hugeSurface, isNot(same(secondIncreaseSurface)));
// Also over-allocated
expect(huge.width, 28);
expect(huge.height, 56);
expect(hugeSurface.width(), 20);
expect(hugeSurface.height(), 40);
// Shrink again. Reuse the last allocated surface.
final CkSurface shrunkSurface2 =
surface.acquireFrame(const ui.Size(5, 15)).skiaSurface;
final DomOffscreenCanvas shrunk2 = surface.debugOffscreenCanvas!;
expect(shrunk2, same(huge));
expect(shrunkSurface2, isNot(same(hugeSurface)));
expect(shrunkSurface2.width(), 5);
expect(shrunkSurface2.height(), 15);
// Doubling the DPR should halve the CSS width, height, and translation of the canvas.
// This tests https://github.com/flutter/flutter/issues/77084
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(2.0);
final CkSurface dpr2Surface2 =
surface.acquireFrame(const ui.Size(5, 15)).skiaSurface;
final DomOffscreenCanvas dpr2Canvas = surface.debugOffscreenCanvas!;
expect(dpr2Canvas, same(huge));
expect(dpr2Surface2, isNot(same(hugeSurface)));
expect(dpr2Surface2.width(), 5);
expect(dpr2Surface2.height(), 15);
// Skipping on Firefox for now since Firefox headless doesn't support WebGL
// This causes issues in the test since we create a Canvas-backed surface,
// which cannot be a different size from the canvas.
// TODO(hterkelsen): See if we can give a custom size for software
// surfaces.
}, skip: isFirefox || !Surface.offscreenCanvasSupported);
test('Surface used as DisplayCanvas resizes correctly', () {
final Surface surface = Surface(isDisplayCanvas: true);
surface.createOrUpdateSurface(const ui.Size(9, 19));
final DomCanvasElement original = getDisplayCanvas(surface);
ui.Size canvasSize = getCssSize(surface);
// Expect exact requested dimensions.
expect(original.width, 9);
expect(original.height, 19);
expect(canvasSize.width, 9);
expect(canvasSize.height, 19);
// Shrinking reuses the existing canvas but translates it so
// Skia renders into the visible area.
surface.createOrUpdateSurface(const ui.Size(5, 15));
final DomCanvasElement shrunk = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(shrunk.width, 9);
expect(shrunk.height, 19);
expect(canvasSize.width, 9);
expect(canvasSize.height, 19);
// The first increase will allocate a new surface, but will overallocate
// by 40% to accommodate future increases.
surface.createOrUpdateSurface(const ui.Size(10, 20));
final DomCanvasElement firstIncrease = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(firstIncrease, same(original));
// Expect overallocated dimensions
expect(firstIncrease.width, 14);
expect(firstIncrease.height, 28);
expect(canvasSize.width, 14);
expect(canvasSize.height, 28);
// Subsequent increases within 40% reuse the old canvas.
surface.createOrUpdateSurface(const ui.Size(11, 22));
final DomCanvasElement secondIncrease = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(secondIncrease, same(firstIncrease));
expect(secondIncrease.width, 14);
expect(secondIncrease.height, 28);
expect(canvasSize.width, 14);
expect(canvasSize.height, 28);
// Increases beyond the 40% limit will cause a new allocation.
surface.createOrUpdateSurface(const ui.Size(20, 40));
final DomCanvasElement huge = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(huge, same(secondIncrease));
// Also over-allocated
expect(huge.width, 28);
expect(huge.height, 56);
expect(canvasSize.width, 28);
expect(canvasSize.height, 56);
// Shrink again. Reuse the last allocated surface.
surface.createOrUpdateSurface(const ui.Size(5, 15));
final DomCanvasElement shrunk2 = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(shrunk2, same(huge));
expect(shrunk2.width, 28);
expect(shrunk2.height, 56);
expect(canvasSize.width, 28);
expect(canvasSize.height, 56);
// Doubling the DPR should halve the CSS width, height, and translation of the canvas.
// This tests https://github.com/flutter/flutter/issues/77084
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(2.0);
surface.createOrUpdateSurface(const ui.Size(5, 15));
final DomCanvasElement dpr2Canvas = getDisplayCanvas(surface);
canvasSize = getCssSize(surface);
expect(dpr2Canvas, same(huge));
expect(dpr2Canvas.width, 28);
expect(dpr2Canvas.height, 56);
// Canvas is half the size in logical pixels because device pixel ratio is
// 2.0.
expect(canvasSize.width, 14);
expect(canvasSize.height, 28);
// Skip on wasm since same() doesn't work for JSValues.
}, skip: isWasm);
test(
'Surface creates new context when WebGL context is restored',
() async {
final Surface surface = Surface();
expect(surface.debugForceNewContext, isTrue);
final CkSurface before =
surface.acquireFrame(const ui.Size(9, 19)).skiaSurface;
expect(surface.debugForceNewContext, isFalse);
// Pump a timer to flush any microtasks.
await Future<void>.delayed(Duration.zero);
final CkSurface afterAcquireFrame =
surface.acquireFrame(const ui.Size(9, 19)).skiaSurface;
// Existing context is reused.
expect(afterAcquireFrame, same(before));
// Emulate WebGL context loss.
final DomOffscreenCanvas canvas = surface.debugOffscreenCanvas!;
final Object ctx = canvas.getContext('webgl2')!;
final Object loseContextExtension = js_util.callMethod(
ctx,
'getExtension',
<String>['WEBGL_lose_context'],
);
js_util.callMethod<void>(
loseContextExtension, 'loseContext', const <void>[]);
// Pump a timer to allow the "lose context" event to propagate.
await Future<void>.delayed(Duration.zero);
// We don't create a new GL context until the context is restored.
expect(surface.debugContextLost, isTrue);
final bool isContextLost =
js_util.callMethod<bool>(ctx, 'isContextLost', const <void>[]);
expect(isContextLost, isTrue);
// Emulate WebGL context restoration.
js_util.callMethod<void>(
loseContextExtension, 'restoreContext', const <void>[]);
// Pump a timer to allow the "restore context" event to propagate.
await Future<void>.delayed(Duration.zero);
expect(surface.debugForceNewContext, isTrue);
final CkSurface afterContextLost =
surface.acquireFrame(const ui.Size(9, 19)).skiaSurface;
// A new context is created.
expect(afterContextLost, isNot(same(before)));
},
// Firefox can't create a WebGL2 context in headless mode.
skip: isFirefox || !Surface.offscreenCanvasSupported,
);
// Regression test for https://github.com/flutter/flutter/issues/75286
test(
'updates canvas logical size when device-pixel ratio changes',
() {
final Surface surface = Surface();
final CkSurface original =
surface.acquireFrame(const ui.Size(10, 16)).skiaSurface;
expect(original.width(), 10);
expect(original.height(), 16);
expect(surface.debugOffscreenCanvas!.width, 10);
expect(surface.debugOffscreenCanvas!.height, 16);
// Increase device-pixel ratio: this makes CSS pixels bigger, so we need
// fewer of them to cover the browser window.
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(2.0);
final CkSurface highDpr =
surface.acquireFrame(const ui.Size(10, 16)).skiaSurface;
expect(highDpr.width(), 10);
expect(highDpr.height(), 16);
expect(surface.debugOffscreenCanvas!.width, 10);
expect(surface.debugOffscreenCanvas!.height, 16);
// Decrease device-pixel ratio: this makes CSS pixels smaller, so we need
// more of them to cover the browser window.
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(0.5);
final CkSurface lowDpr =
surface.acquireFrame(const ui.Size(10, 16)).skiaSurface;
expect(lowDpr.width(), 10);
expect(lowDpr.height(), 16);
expect(surface.debugOffscreenCanvas!.width, 10);
expect(surface.debugOffscreenCanvas!.height, 16);
// See https://github.com/flutter/flutter/issues/77084#issuecomment-1120151172
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(2.0);
final CkSurface changeRatioAndSize =
surface.acquireFrame(const ui.Size(9.9, 15.9)).skiaSurface;
expect(changeRatioAndSize.width(), 10);
expect(changeRatioAndSize.height(), 16);
expect(surface.debugOffscreenCanvas!.width, 10);
expect(surface.debugOffscreenCanvas!.height, 16);
},
skip: !Surface.offscreenCanvasSupported,
);
});
}
DomCanvasElement getDisplayCanvas(Surface surface) {
assert(surface.isDisplayCanvas);
return surface.hostElement.children.first as DomCanvasElement;
}
/// Extracts the CSS style values of 'width' and 'height' and returns them
/// as a [ui.Size].
ui.Size getCssSize(Surface surface) {
final DomCanvasElement canvas = getDisplayCanvas(surface);
final String cssWidth = canvas.style.width;
final String cssHeight = canvas.style.height;
// CSS width and height should be in the form 'NNNpx'. So cut off the 'px' and
// convert to a number.
final double width =
double.parse(cssWidth.substring(0, cssWidth.length - 2).trim());
final double height =
double.parse(cssHeight.substring(0, cssHeight.length - 2).trim());
return ui.Size(width, height);
}
| engine/lib/web_ui/test/canvaskit/surface_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/surface_test.dart",
"repo_id": "engine",
"token_count": 4890
} | 308 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import '../common/mock_engine_canvas.dart';
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
await bootstrapAndRunApp(withImplicitView: true);
group('EngineCanvas', () {
late MockEngineCanvas mockCanvas;
late ui.Paragraph paragraph;
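    // Runs the same test body against each canvas implementation.
    // MockEngineCanvas records the calls so `whenDone` can assert on them.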
void testCanvas(
String description,
void Function(EngineCanvas canvas) testFn, {
ui.Rect canvasSize = const ui.Rect.fromLTWH(0, 0, 100, 100),
ui.VoidCallback? whenDone,
}) {
test(description, () {
testFn(BitmapCanvas(canvasSize, RenderStrategy()));
testFn(DomCanvas(domDocument.createElement('flt-picture')));
testFn(mockCanvas = MockEngineCanvas());
whenDone?.call();
});
}
testCanvas('draws laid out paragraph', (EngineCanvas canvas) {
const ui.Rect screenRect = ui.Rect.fromLTWH(0, 0, 100, 100);
final RecordingCanvas recordingCanvas = RecordingCanvas(screenRect);
final ui.ParagraphBuilder builder =
ui.ParagraphBuilder(ui.ParagraphStyle());
builder.addText('sample');
paragraph = builder.build();
paragraph.layout(const ui.ParagraphConstraints(width: 100));
recordingCanvas.drawParagraph(paragraph, const ui.Offset(10, 10));
recordingCanvas.endRecording();
canvas.clear();
recordingCanvas.apply(canvas, screenRect);
}, whenDone: () {
expect(mockCanvas.methodCallLog, hasLength(3));
MockCanvasCall call = mockCanvas.methodCallLog[0];
expect(call.methodName, 'clear');
call = mockCanvas.methodCallLog[1];
expect(call.methodName, 'drawParagraph');
final Map<dynamic, dynamic> arguments = call.arguments as Map<dynamic, dynamic>;
expect(arguments['paragraph'], paragraph);
expect(arguments['offset'], const ui.Offset(10, 10));
});
testCanvas('ignores paragraphs that were not laid out',
(EngineCanvas canvas) {
const ui.Rect screenRect = ui.Rect.fromLTWH(0, 0, 100, 100);
final RecordingCanvas recordingCanvas = RecordingCanvas(screenRect);
final ui.ParagraphBuilder builder =
ui.ParagraphBuilder(ui.ParagraphStyle());
builder.addText('sample');
final ui.Paragraph paragraph = builder.build();
recordingCanvas.drawParagraph(paragraph, const ui.Offset(10, 10));
recordingCanvas.endRecording();
canvas.clear();
recordingCanvas.apply(canvas, screenRect);
}, whenDone: () {
expect(mockCanvas.methodCallLog, hasLength(2));
expect(mockCanvas.methodCallLog[0].methodName, 'clear');
expect(mockCanvas.methodCallLog[1].methodName, 'endOfPaint');
});
});
}
| engine/lib/web_ui/test/engine/canvas_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/canvas_test.dart",
"repo_id": "engine",
"token_count": 1144
} | 309 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests();
Future<Image> createTestImageByColor(Color color) async {
final EnginePictureRecorder recorder = EnginePictureRecorder();
final RecordingCanvas canvas =
recorder.beginRecording(const Rect.fromLTRB(0, 0, 2, 2));
canvas.drawColor(color, BlendMode.srcOver);
final Picture testPicture = recorder.endRecording();
final Image testImage = await testPicture.toImage(2, 2);
return testImage;
}
test('Picture.toImage().toByteData()', () async {
final Image testImage = await createTestImageByColor(const Color(0xFFCCDD00));
final ByteData bytes =
(await testImage.toByteData())!;
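    // The default format is ImageByteFormat.rawRgba: bytes are stored as
    // R, G, B, A, so reading them as a little-endian Uint32 yields 0xAABBGGRR.
    // For the ARGB color 0xFFCCDD00 that is 0xFF00DDCC.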
expect(
bytes.buffer.asUint32List(),
<int>[0xFF00DDCC, 0xFF00DDCC, 0xFF00DDCC, 0xFF00DDCC],
);
final ByteData pngBytes =
(await testImage.toByteData(format: ImageByteFormat.png))!;
// PNG-encoding is browser-specific, but the header is standard. We only
// test the header.
final List<int> pngHeader = <int>[137, 80, 78, 71, 13, 10, 26, 10];
expect(
pngBytes.buffer.asUint8List().sublist(0, pngHeader.length),
pngHeader,
);
});
test('Image.toByteData(format: ImageByteFormat.rawStraightRgba)', () async {
final Image testImage = await createTestImageByColor(const Color(0xAAFFFF00));
final ByteData bytes =
(await testImage.toByteData(format: ImageByteFormat.rawStraightRgba))!;
expect(
bytes.buffer.asUint32List(),
<int>[0xAA00FFFF, 0xAA00FFFF, 0xAA00FFFF, 0xAA00FFFF],
);
});
}
| engine/lib/web_ui/test/engine/image_to_byte_data_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/image_to_byte_data_test.dart",
"repo_id": "engine",
"token_count": 748
} | 310 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@TestOn('browser')
library;
import 'dart:async';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui show Offset;
void main() {
internalBootstrapBrowserTest(() => doTests);
}
void doTests() {
late EngineFlutterView view;
late DomElement rootElement;
late DomElement eventSource;
final StreamController<DomEvent> events = StreamController<DomEvent>.broadcast();
/// Dispatches an event `e` on `target`, and returns it after it's gone through the browser.
Future<DomPointerEvent> dispatchAndCatch(DomElement target, DomPointerEvent e) async {
final Future<DomEvent> nextEvent = events.stream.first;
target.dispatchEvent(e);
return (await nextEvent) as DomPointerEvent;
}
group('computeEventOffsetToTarget', () {
setUp(() {
view = EngineFlutterView(EnginePlatformDispatcher.instance, domDocument.body!);
rootElement = view.dom.rootElement;
eventSource = createDomElement('div-event-source');
rootElement.append(eventSource);
      // Make the containers fixed-size, absolutely positioned elements so we
      // can reason about screen coordinates easily later.
rootElement.style
..position = 'absolute'
..width = '320px'
..height = '240px'
..top = '0px'
..left = '0px';
eventSource.style
..position = 'absolute'
..width = '100px'
..height = '80px'
..top = '100px'
..left = '120px';
rootElement.addEventListener('click', createDomEventListener((DomEvent e) {
events.add(e);
}));
});
tearDown(() {
view.dispose();
});
test('Event dispatched by target returns offsetX, offsetY', () async {
// Fire an event contained within target...
final DomMouseEvent event = await dispatchAndCatch(rootElement, createDomPointerEvent(
'click',
<String, Object>{
'bubbles': true,
'clientX': 10,
'clientY': 20,
}
));
expect(event.offsetX, 10);
expect(event.offsetY, 20);
final ui.Offset offset = computeEventOffsetToTarget(event, view);
expect(offset.dx, event.offsetX);
expect(offset.dy, event.offsetY);
});
test('Event dispatched on child re-computes offset (offsetX/Y invalid)', () async {
// Fire an event contained within target...
final DomMouseEvent event = await dispatchAndCatch(eventSource, createDomPointerEvent(
'click',
<String, Object>{
'bubbles': true, // So it can be caught in `target`
'clientX': 140, // x = 20px into `eventSource`.
'clientY': 110, // y = 10px into `eventSource`.
}
));
expect(event.offsetX, 20);
expect(event.offsetY, 10);
final ui.Offset offset = computeEventOffsetToTarget(event, view);
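      // rootElement is absolutely positioned at the page origin (0, 0), so the
      // offset relative to the Flutter view equals the client coordinates.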
expect(offset.dx, 140);
expect(offset.dy, 110);
});
test('Event dispatched by TalkBack gets a computed offset', () async {
// Fill this in to test _computeOffsetForTalkbackEvent
}, skip: 'To be implemented!');
test('Event dispatched on text editing node computes offset with framework geometry', () async {
// Fill this in to test _computeOffsetForInputs
}, skip: 'To be implemented!');
});
}
| engine/lib/web_ui/test/engine/pointer_binding/event_position_helper_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/pointer_binding/event_position_helper_test.dart",
"repo_id": "engine",
"token_count": 1304
} | 311 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:typed_data';
import 'package:quiver/testing/async.dart';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'package:ui/ui_web/src/ui_web.dart' as ui_web;
import '../../common/rendering.dart';
import '../../common/test_initialization.dart';
import 'semantics_tester.dart';
DateTime _testTime = DateTime(2018, 12, 17);
EngineSemantics semantics() => EngineSemantics.instance;
EngineSemanticsOwner owner() => EnginePlatformDispatcher.instance.implicitView!.semantics;
DomElement get platformViewsHost =>
EnginePlatformDispatcher.instance.implicitView!.dom.platformViewsHost;
void main() {
internalBootstrapBrowserTest(() {
return testMain;
});
}
Future<void> testMain() async {
await bootstrapAndRunApp(withImplicitView: true);
setUpRenderingForTests();
runSemanticsTests();
}
void runSemanticsTests() {
setUp(() {
EngineSemantics.debugResetSemantics();
});
group(EngineSemanticsOwner, () {
_testEngineSemanticsOwner();
});
group('longestIncreasingSubsequence', () {
_testLongestIncreasingSubsequence();
});
group('Role managers', () {
_testRoleManagerLifecycle();
});
group('Text', () {
_testText();
});
group('labels', () {
_testLabels();
});
group('container', () {
_testContainer();
});
group('vertical scrolling', () {
_testVerticalScrolling();
});
group('horizontal scrolling', () {
_testHorizontalScrolling();
});
group('incrementable', () {
_testIncrementables();
});
group('text field', () {
_testTextField();
});
group('checkboxes, radio buttons and switches', () {
_testCheckables();
});
group('tappable', () {
_testTappable();
});
group('image', () {
_testImage();
});
group('header', () {
_testHeader();
});
group('live region', () {
_testLiveRegion();
});
group('platform view', () {
_testPlatformView();
});
group('accessibility builder', () {
_testEngineAccessibilityBuilder();
});
group('group', () {
_testGroup();
});
group('dialog', () {
_testDialog();
});
group('focusable', () {
_testFocusable();
});
group('link', () {
_testLink();
});
}
void _testRoleManagerLifecycle() {
test('Secondary role managers are added upon node initialization', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
// Check that roles are initialized immediately
{
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
tester.expectSemantics('<sem role="button" style="$rootSemanticStyle"></sem>');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.button);
expect(
node.primaryRole?.debugSecondaryRoles,
containsAll(<Role>[Role.focusable, Role.tappable, Role.labelAndValue]),
);
expect(tester.getSemanticsObject(0).element.tabIndex, -1);
}
// Check that roles apply their functionality upon update.
{
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
label: 'a label',
isFocusable: true,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
tester.expectSemantics('<sem role="button" style="$rootSemanticStyle">a label</sem>');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.button);
expect(
node.primaryRole?.debugSecondaryRoles,
containsAll(<Role>[Role.focusable, Role.tappable, Role.labelAndValue]),
);
expect(tester.getSemanticsObject(0).element.tabIndex, 0);
}
semantics().semanticsEnabled = false;
});
}
void _testEngineAccessibilityBuilder() {
final EngineAccessibilityFeaturesBuilder builder =
EngineAccessibilityFeaturesBuilder(0);
EngineAccessibilityFeatures features = builder.build();
test('accessible navigation', () {
expect(features.accessibleNavigation, isFalse);
builder.accessibleNavigation = true;
features = builder.build();
expect(features.accessibleNavigation, isTrue);
});
test('bold text', () {
expect(features.boldText, isFalse);
builder.boldText = true;
features = builder.build();
expect(features.boldText, isTrue);
});
test('disable animations', () {
expect(features.disableAnimations, isFalse);
builder.disableAnimations = true;
features = builder.build();
expect(features.disableAnimations, isTrue);
});
test('high contrast', () {
expect(features.highContrast, isFalse);
builder.highContrast = true;
features = builder.build();
expect(features.highContrast, isTrue);
});
test('invert colors', () {
expect(features.invertColors, isFalse);
builder.invertColors = true;
features = builder.build();
expect(features.invertColors, isTrue);
});
test('on off switch labels', () {
expect(features.onOffSwitchLabels, isFalse);
builder.onOffSwitchLabels = true;
features = builder.build();
expect(features.onOffSwitchLabels, isTrue);
});
test('reduce motion', () {
expect(features.reduceMotion, isFalse);
builder.reduceMotion = true;
features = builder.build();
expect(features.reduceMotion, isTrue);
});
}
void _testEngineSemanticsOwner() {
test('instantiates a singleton', () {
expect(semantics(), same(semantics()));
});
test('semantics is off by default', () {
expect(semantics().semanticsEnabled, isFalse);
});
test('default mode is "unknown"', () {
expect(semantics().mode, AccessibilityMode.unknown);
});
test('accessibilityFeatures copyWith function works', () {
const EngineAccessibilityFeatures original = EngineAccessibilityFeatures(0);
EngineAccessibilityFeatures copy =
original.copyWith(accessibleNavigation: true);
expect(copy.accessibleNavigation, true);
expect(copy.boldText, false);
expect(copy.disableAnimations, false);
expect(copy.highContrast, false);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, false);
copy = original.copyWith(boldText: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, true);
expect(copy.disableAnimations, false);
expect(copy.highContrast, false);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, false);
copy = original.copyWith(disableAnimations: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, false);
expect(copy.disableAnimations, true);
expect(copy.highContrast, false);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, false);
copy = original.copyWith(highContrast: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, false);
expect(copy.disableAnimations, false);
expect(copy.highContrast, true);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, false);
copy = original.copyWith(invertColors: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, false);
expect(copy.disableAnimations, false);
expect(copy.highContrast, false);
expect(copy.invertColors, true);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, false);
copy = original.copyWith(onOffSwitchLabels: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, false);
expect(copy.disableAnimations, false);
expect(copy.highContrast, false);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, true);
expect(copy.reduceMotion, false);
copy = original.copyWith(reduceMotion: true);
expect(copy.accessibleNavigation, false);
expect(copy.boldText, false);
expect(copy.disableAnimations, false);
expect(copy.highContrast, false);
expect(copy.invertColors, false);
expect(copy.onOffSwitchLabels, false);
expect(copy.reduceMotion, true);
});
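  // Builds and applies a two-node semantics tree: a root node (id 0) with a
  // single child (id 1) that carries the given label, tooltip, and flags.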
void renderSemantics({String? label, String? tooltip, Set<ui.SemanticsFlag> flags = const <ui.SemanticsFlag>{}}) {
int flagValues = 0;
for (final ui.SemanticsFlag flag in flags) {
flagValues = flagValues | flag.index;
}
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 20, 20),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
label: label ?? '',
tooltip: tooltip ?? '',
flags: flagValues,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 20, 20),
);
owner().updateSemantics(builder.build());
}
void renderLabel(String label) {
renderSemantics(label: label);
}
test('produces a label', () async {
semantics().semanticsEnabled = true;
// Create
renderLabel('Hello');
final Map<int, SemanticsObject> tree = owner().debugSemanticsTree!;
expect(tree.length, 2);
expect(tree[0]!.id, 0);
expect(tree[0]!.element.tagName.toLowerCase(), 'flt-semantics');
expect(tree[1]!.id, 1);
expect(tree[1]!.label, 'Hello');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text">Hello</sem>
</sem-c>
</sem>''');
// Update
renderLabel('World');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text">World</sem>
</sem-c>
</sem>''');
// Remove
renderLabel('');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text"></sem>
</sem-c>
</sem>''');
semantics().semanticsEnabled = false;
});
test('can switch role', () async {
semantics().semanticsEnabled = true;
// Create
renderSemantics(label: 'Hello');
Map<int, SemanticsObject> tree = owner().debugSemanticsTree!;
expect(tree.length, 2);
expect(tree[1]!.element.tagName.toLowerCase(), 'flt-semantics');
expect(tree[1]!.id, 1);
expect(tree[1]!.label, 'Hello');
final DomElement existingParent = tree[1]!.element.parent!;
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text">Hello</sem>
</sem-c>
</sem>''');
// Update
renderSemantics(label: 'Hello', flags: <ui.SemanticsFlag>{ ui.SemanticsFlag.isLink });
tree = owner().debugSemanticsTree!;
expect(tree.length, 2);
expect(tree[1]!.id, 1);
expect(tree[1]!.label, 'Hello');
expect(tree[1]!.element.tagName.toLowerCase(), 'a');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<a style="display: block;">Hello</a>
</sem-c>
</sem>''');
expect(existingParent, tree[1]!.element.parent);
semantics().semanticsEnabled = false;
});
test('tooltip is part of label', () async {
semantics().semanticsEnabled = true;
// Create
renderSemantics(tooltip: 'tooltip');
final Map<int, SemanticsObject> tree = owner().debugSemanticsTree!;
expect(tree.length, 2);
expect(tree[0]!.id, 0);
expect(tree[0]!.element.tagName.toLowerCase(), 'flt-semantics');
expect(tree[1]!.id, 1);
expect(tree[1]!.tooltip, 'tooltip');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem>tooltip</sem>
</sem-c>
</sem>''');
// Update
renderSemantics(label: 'Hello', tooltip: 'tooltip');
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text">tooltip\nHello</sem>
</sem-c>
</sem>''');
// Remove
renderSemantics();
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text"></sem>
</sem-c>
</sem>''');
semantics().semanticsEnabled = false;
});
test('clears semantics tree when disabled', () {
expect(owner().debugSemanticsTree, isEmpty);
semantics().semanticsEnabled = true;
renderLabel('Hello');
expect(owner().debugSemanticsTree, isNotEmpty);
semantics().semanticsEnabled = false;
expect(owner().debugSemanticsTree, isEmpty);
});
test('accepts standalone browser gestures', () {
semantics().semanticsEnabled = true;
expect(semantics().shouldAcceptBrowserGesture('click'), isTrue);
semantics().semanticsEnabled = false;
});
test('rejects browser gestures accompanied by pointer click', () {
FakeAsync().run((FakeAsync fakeAsync) {
semantics()
..debugOverrideTimestampFunction(fakeAsync.getClock(_testTime).now)
..semanticsEnabled = true;
expect(semantics().shouldAcceptBrowserGesture('click'), isTrue);
semantics().receiveGlobalEvent(createDomEvent('Event', 'pointermove'));
expect(semantics().shouldAcceptBrowserGesture('click'), isFalse);
      // After 1 second of inactivity a browser gesture counts as standalone.
fakeAsync.elapse(const Duration(seconds: 1));
expect(semantics().shouldAcceptBrowserGesture('click'), isTrue);
semantics().semanticsEnabled = false;
});
});
test('checks shouldEnableSemantics for every global event', () {
final MockSemanticsEnabler mockSemanticsEnabler = MockSemanticsEnabler();
semantics().semanticsHelper.semanticsEnabler = mockSemanticsEnabler;
final DomEvent pointerEvent = createDomEvent('Event', 'pointermove');
semantics().receiveGlobalEvent(pointerEvent);
// Verify the interactions.
expect(
mockSemanticsEnabler.shouldEnableSemanticsEvents,
<DomEvent>[pointerEvent],
);
});
test('forwards events to framework if shouldEnableSemantics returns true',
() {
final MockSemanticsEnabler mockSemanticsEnabler = MockSemanticsEnabler();
semantics().semanticsHelper.semanticsEnabler = mockSemanticsEnabler;
final DomEvent pointerEvent = createDomEvent('Event', 'pointermove');
mockSemanticsEnabler.shouldEnableSemanticsReturnValue = true;
expect(semantics().receiveGlobalEvent(pointerEvent), isTrue);
});
test('semantics owner update phases', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
expect(
reason: 'Should start in idle phase',
owner().phase,
SemanticsUpdatePhase.idle,
);
void pumpSemantics({ required String label }) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
children: <SemanticsNodeUpdate>[
tester.updateNode(id: 1, label: label),
],
);
tester.apply();
}
SemanticsUpdatePhase? capturedPostUpdateCallbackPhase;
owner().addOneTimePostUpdateCallback(() {
capturedPostUpdateCallbackPhase = owner().phase;
});
pumpSemantics(label: 'Hello');
final SemanticsObject semanticsObject = owner().debugSemanticsTree![1]!;
expect(
reason: 'Should be in postUpdate phase while calling post-update callbacks',
capturedPostUpdateCallbackPhase,
SemanticsUpdatePhase.postUpdate,
);
expect(
reason: 'After the update is done, should go back to idle',
owner().phase,
SemanticsUpdatePhase.idle,
);
// Rudely replace the role manager with a mock, and trigger an update.
final MockRoleManager mockRoleManager = MockRoleManager(PrimaryRole.generic, semanticsObject);
semanticsObject.primaryRole = mockRoleManager;
pumpSemantics(label: 'World');
expect(
reason: 'While updating must be in SemanticsUpdatePhase.updating phase',
mockRoleManager.log,
<MockRoleManagerLogEntry>[
(method: 'update', phase: SemanticsUpdatePhase.updating),
],
);
semantics().semanticsEnabled = false;
});
}
typedef MockRoleManagerLogEntry = ({
String method,
SemanticsUpdatePhase phase,
});
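/// A [PrimaryRoleManager] that records the owner's [SemanticsUpdatePhase] at
/// the moment [update] is called.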
class MockRoleManager extends PrimaryRoleManager {
MockRoleManager(super.role, super.semanticsObject) : super.blank();
final List<MockRoleManagerLogEntry> log = <MockRoleManagerLogEntry>[];
void _log(String method) {
log.add((
method: method,
phase: semanticsObject.owner.phase,
));
}
@override
void update() {
super.update();
_log('update');
}
@override
bool focusAsRouteDefault() {
throw UnimplementedError();
}
}
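/// A [SemanticsEnabler] that records every event passed to
/// [shouldEnableSemantics] and returns a configurable value.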
class MockSemanticsEnabler implements SemanticsEnabler {
@override
void dispose() {}
@override
bool get isWaitingToEnableSemantics => throw UnimplementedError();
@override
DomElement prepareAccessibilityPlaceholder() {
throw UnimplementedError();
}
bool shouldEnableSemanticsReturnValue = false;
final List<DomEvent> shouldEnableSemanticsEvents = <DomEvent>[];
@override
bool shouldEnableSemantics(DomEvent event) {
shouldEnableSemanticsEvents.add(event);
return shouldEnableSemanticsReturnValue;
}
@override
bool tryEnableSemantics(DomEvent event) {
throw UnimplementedError();
}
}
void _testHeader() {
test('renders heading role for headers', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isHeader.index,
label: 'Header of the page',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="heading" style="$rootSemanticStyle">Header of the page</sem>
''');
semantics().semanticsEnabled = false;
});
// When a header has child elements, role="heading" prevents AT from reaching
// child elements. To fix that role="group" is used, even though that causes
// the heading to not be announced as a heading. If the app really needs the
// heading to be announced as a heading, the developer can restructure the UI
// such that the heading is not a parent node, but a side-note, e.g. preceding
// the child list.
test('uses group role for headers when children are present', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isHeader.index,
label: 'Header of the page',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="group" aria-label="Header of the page" style="$rootSemanticStyle"><sem-c><sem></sem></sem-c></sem>
''');
semantics().semanticsEnabled = false;
});
}
void _testLongestIncreasingSubsequence() {
void expectLis(List<int> list, List<int> seq) {
expect(longestIncreasingSubsequence(list), seq);
}
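  // Note: judging by the expectations below, longestIncreasingSubsequence
  // returns the indices of the selected elements rather than their values.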
test('trivial case', () {
expectLis(<int>[], <int>[]);
});
test('longest in the middle', () {
expectLis(<int>[10, 1, 2, 3, 0], <int>[1, 2, 3]);
});
test('longest at head', () {
expectLis(<int>[1, 2, 3, 0], <int>[0, 1, 2]);
});
test('longest at tail', () {
expectLis(<int>[10, 1, 2, 3], <int>[1, 2, 3]);
});
test('longest in a jagged pattern', () {
expectLis(
<int>[0, 1, -1, 2, -2, 3, -3, 4, -4, 5, -5], <int>[0, 1, 3, 5, 7, 9]);
});
test('fully sorted up', () {
for (int count = 0; count < 100; count += 1) {
expectLis(
List<int>.generate(count, (int i) => 10 * i),
List<int>.generate(count, (int i) => i),
);
}
});
test('fully sorted down', () {
for (int count = 1; count < 100; count += 1) {
expectLis(
List<int>.generate(count, (int i) => 10 * (count - i)),
<int>[count - 1],
);
}
});
}
void _testText() {
test('renders a piece of plain text', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'plain text',
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(
owner(),
'''<sem role="text" style="$rootSemanticStyle">plain text</sem>''',
);
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.generic);
expect(
node.primaryRole!.secondaryRoleManagers!.map((RoleManager m) => m.runtimeType).toList(),
<Type>[
Focusable,
LiveRegion,
RouteName,
LabelAndValue,
],
);
semantics().semanticsEnabled = false;
});
test('renders a tappable piece of text', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
hasTap: true,
label: 'tappable text',
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
expectSemanticsTree(
owner(),
'''<sem flt-tappable="" role="text" style="$rootSemanticStyle">tappable text</sem>''',
);
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.generic);
expect(
node.primaryRole!.secondaryRoleManagers!.map((RoleManager m) => m.runtimeType).toList(),
<Type>[
Focusable,
LiveRegion,
RouteName,
LabelAndValue,
Tappable,
],
);
semantics().semanticsEnabled = false;
});
}
void _testLabels() {
test('computeDomSemanticsLabel combines tooltip, label, value, and hint', () {
expect(
computeDomSemanticsLabel(tooltip: 'tooltip'),
'tooltip',
);
expect(
computeDomSemanticsLabel(label: 'label'),
'label',
);
expect(
computeDomSemanticsLabel(value: 'value'),
'value',
);
expect(
computeDomSemanticsLabel(hint: 'hint'),
'hint',
);
expect(
computeDomSemanticsLabel(tooltip: 'tooltip', label: 'label', hint: 'hint', value: 'value'),
'''
tooltip
label hint value'''
);
expect(
computeDomSemanticsLabel(tooltip: 'tooltip', hint: 'hint', value: 'value'),
'''
tooltip
hint value'''
);
expect(
computeDomSemanticsLabel(tooltip: 'tooltip', label: 'label', value: 'value'),
'''
tooltip
label value'''
);
expect(
computeDomSemanticsLabel(tooltip: 'tooltip', label: 'label', hint: 'hint'),
'''
tooltip
label hint'''
);
});
test('computeDomSemanticsLabel collapses empty labels to null', () {
expect(
computeDomSemanticsLabel(),
isNull,
);
expect(
computeDomSemanticsLabel(tooltip: ''),
isNull,
);
expect(
computeDomSemanticsLabel(label: ''),
isNull,
);
expect(
computeDomSemanticsLabel(value: ''),
isNull,
);
expect(
computeDomSemanticsLabel(hint: ''),
isNull,
);
expect(
computeDomSemanticsLabel(tooltip: '', label: '', hint: '', value: ''),
isNull,
);
expect(
computeDomSemanticsLabel(tooltip: '', hint: '', value: ''),
isNull,
);
expect(
computeDomSemanticsLabel(tooltip: '', label: '', value: ''),
isNull,
);
expect(
computeDomSemanticsLabel(tooltip: '', label: '', hint: ''),
isNull,
);
});
}
void _testContainer() {
test('container node has no transform when there is no rect offset',
() async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
const ui.Rect zeroOffsetRect = ui.Rect.fromLTRB(0, 0, 20, 20);
updateNode(
builder,
transform: Matrix4.identity().toFloat64(),
rect: zeroOffsetRect,
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: zeroOffsetRect,
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
final DomElement parentElement =
owner().semanticsHost.querySelector('flt-semantics')!;
final DomElement container =
owner().semanticsHost.querySelector('flt-semantics-container')!;
if (isMacOrIOS) {
expect(parentElement.style.top, '0px');
expect(parentElement.style.left, '0px');
expect(container.style.top, '0px');
expect(container.style.left, '0px');
} else {
expect(parentElement.style.top, '');
expect(parentElement.style.left, '');
expect(container.style.top, '');
expect(container.style.left, '');
}
expect(parentElement.style.transform, '');
expect(parentElement.style.transformOrigin, '');
expect(container.style.transform, '');
expect(container.style.transformOrigin, '');
semantics().semanticsEnabled = false;
});
test('container node compensates for rect offset', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
final DomElement parentElement =
owner().semanticsHost.querySelector('flt-semantics')!;
final DomElement container =
owner().semanticsHost.querySelector('flt-semantics-container')!;
expect(parentElement.style.transform, 'matrix(1, 0, 0, 1, 10, 10)');
if (isSafari) {
// macOS 13 returns different values than macOS 12.
expect(parentElement.style.transformOrigin, anyOf(contains('0px 0px 0px'), contains('0px 0px')));
} else {
expect(parentElement.style.transformOrigin, '0px 0px 0px');
}
expect(container.style.top, '-10px');
expect(container.style.left, '-10px');
semantics().semanticsEnabled = false;
});
test('0 offsets are not removed for voiceover', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 20, 20),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
final DomElement parentElement =
owner().semanticsHost.querySelector('flt-semantics')!;
final DomElement container =
owner().semanticsHost.querySelector('flt-semantics-container')!;
if (isMacOrIOS) {
expect(parentElement.style.top, '0px');
expect(parentElement.style.left, '0px');
expect(container.style.top, '0px');
expect(container.style.left, '0px');
} else {
expect(parentElement.style.top, '');
expect(parentElement.style.left, '');
expect(container.style.top, '');
expect(container.style.left, '');
}
expect(parentElement.style.transform, '');
expect(parentElement.style.transformOrigin, '');
expect(container.style.transform, '');
expect(container.style.transformOrigin, '');
semantics().semanticsEnabled = false;
});
test('renders in traversal order, hit-tests in reverse z-index order',
() async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
// State 1: render initial tree with middle elements swapped hit-test wise
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2, 3, 4]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 3, 2, 4]),
);
for (int id = 1; id <= 4; id++) {
updateNode(builder, id: id);
}
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 4"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 3"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
}
// State 2: update z-index
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2, 3, 4]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2, 3, 4]),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 4"></sem>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
}
// State 3: update traversal order
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[4, 2, 3, 1]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2, 3, 4]),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 1"></sem>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 4"></sem>
</sem-c>
</sem>''');
}
    // State 4: update both orders
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1, 3, 2, 4]),
childrenInHitTestOrder: Int32List.fromList(<int>[3, 4, 1, 2]),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 2"></sem>
<sem style="z-index: 4"></sem>
<sem style="z-index: 1"></sem>
<sem style="z-index: 3"></sem>
</sem-c>
</sem>''');
}
semantics().semanticsEnabled = false;
});
test(
'container nodes are transparent and leaf children are opaque hit-test wise',
() async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2]),
);
updateNode(builder, id: 1);
updateNode(builder, id: 2);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
final DomElement root = owner().semanticsHost.querySelector('#flt-semantic-node-0')!;
expect(root.style.pointerEvents, 'none');
final DomElement child1 =
owner().semanticsHost.querySelector('#flt-semantic-node-1')!;
expect(child1.style.pointerEvents, 'all');
final DomElement child2 =
owner().semanticsHost.querySelector('#flt-semantic-node-2')!;
expect(child2.style.pointerEvents, 'all');
semantics().semanticsEnabled = false;
});
test('descendant nodes are removed from the node map, unless reparented', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2]),
);
updateNode(
builder,
id: 1,
childrenInTraversalOrder: Int32List.fromList(<int>[3, 4]),
childrenInHitTestOrder: Int32List.fromList(<int>[3, 4]),
);
updateNode(
builder,
id: 2,
childrenInTraversalOrder: Int32List.fromList(<int>[5, 6]),
childrenInHitTestOrder: Int32List.fromList(<int>[5, 6]),
);
updateNode(builder, id: 3);
updateNode(builder, id: 4);
updateNode(builder, id: 5);
updateNode(builder, id: 6);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 2">
<sem-c>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>
<sem style="z-index: 1">
<sem-c>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>
</sem-c>
</sem>''');
expect(owner().debugSemanticsTree!.keys.toList(), unorderedEquals(<int>[0, 1, 2, 3, 4, 5, 6]));
}
// Remove node #2 => expect nodes #2 and #5 to be removed and #6 reparented.
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
childrenInTraversalOrder: Int32List.fromList(<int>[3, 4, 6]),
childrenInHitTestOrder: Int32List.fromList(<int>[3, 4, 6]),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 2">
<sem-c>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>
</sem-c>
</sem>''');
expect(owner().debugSemanticsTree!.keys.toList(), unorderedEquals(<int>[0, 1, 3, 4, 6]));
}
semantics().semanticsEnabled = false;
});
}
void _testVerticalScrolling() {
test('renders an empty scrollable node', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.scrollUp.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 50, 100),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-y: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
</sem>''');
final DomElement scrollable = findScrollable(owner());
expect(scrollable.scrollTop, isPositive);
semantics().semanticsEnabled = false;
});
test('scrollable node with children has a container node', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.scrollUp.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 50, 100),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-y: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
final DomElement scrollable = findScrollable(owner());
expect(scrollable, isNotNull);
// When there's less content than the available size the neutral scrollTop
// is still a positive number.
expect(scrollable.scrollTop, isPositive);
semantics().semanticsEnabled = false;
});
test('scrollable node dispatches scroll events', () async {
Future<ui.SemanticsActionEvent> captureSemanticsEvent() {
final Completer<ui.SemanticsActionEvent> completer = Completer<ui.SemanticsActionEvent>();
ui.PlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
completer.complete(event);
};
return completer.future;
}
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
addTearDown(() async {
semantics().semanticsEnabled = false;
});
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 |
ui.SemanticsAction.scrollUp.index |
ui.SemanticsAction.scrollDown.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 50, 100),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2, 3]),
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2, 3]),
);
for (int id = 1; id <= 3; id++) {
updateNode(
builder,
id: id,
transform: Matrix4.translationValues(0, 50.0 * id, 0).toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 50, 50),
);
}
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-y: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
<sem-c>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
final DomElement scrollable = owner().debugSemanticsTree![0]!.element;
expect(scrollable, isNotNull);
    // When there's more content than the available size the neutral scrollTop
    // is greater than 0, with a maximum of 10 (or 9 on some browsers).
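    // (Keeping the neutral position away from the extremes presumably ensures
    // that a scroll in either direction still produces a DOM scroll event that
    // can be mapped to a scrollUp or scrollDown semantics action.)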
int browserMaxScrollDiff = 0;
// The max scroll value varies between `9` and `10` for Safari desktop
// browsers.
if (browserEngine == BrowserEngine.webkit &&
operatingSystem == OperatingSystem.macOs) {
browserMaxScrollDiff = 1;
}
expect(scrollable.scrollTop >= (10 - browserMaxScrollDiff), isTrue);
Future<ui.SemanticsActionEvent> capturedEventFuture = captureSemanticsEvent();
scrollable.scrollTop = 20;
expect(scrollable.scrollTop, 20);
ui.SemanticsActionEvent capturedEvent = await capturedEventFuture;
expect(capturedEvent.nodeId, 0);
expect(capturedEvent.type, ui.SemanticsAction.scrollUp);
expect(capturedEvent.arguments, isNull);
// Engine semantics returns scroll top back to neutral.
expect(scrollable.scrollTop >= (10 - browserMaxScrollDiff), isTrue);
capturedEventFuture = captureSemanticsEvent();
scrollable.scrollTop = 5;
capturedEvent = await capturedEventFuture;
expect(scrollable.scrollTop >= (5 - browserMaxScrollDiff), isTrue);
expect(capturedEvent.nodeId, 0);
expect(capturedEvent.type, ui.SemanticsAction.scrollDown);
expect(capturedEvent.arguments, isNull);
// Engine semantics returns scroll top back to neutral.
expect(scrollable.scrollTop >= (10 - browserMaxScrollDiff), isTrue);
});
}
void _testHorizontalScrolling() {
test('renders an empty scrollable node', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.scrollLeft.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-x: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
</sem>''');
semantics().semanticsEnabled = false;
});
test('scrollable node with children has a container node', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.scrollLeft.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-x: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
final DomElement scrollable = findScrollable(owner());
expect(scrollable, isNotNull);
// When there's less content than the available size the neutral
// scrollLeft is still a positive number.
expect(scrollable.scrollLeft, isPositive);
semantics().semanticsEnabled = false;
});
test('scrollable node dispatches scroll events', () async {
Future<ui.SemanticsActionEvent> captureSemanticsEvent() {
final Completer<ui.SemanticsActionEvent> completer = Completer<ui.SemanticsActionEvent>();
ui.PlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
completer.complete(event);
};
return completer.future;
}
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
addTearDown(() async {
semantics().semanticsEnabled = false;
});
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 |
ui.SemanticsAction.scrollLeft.index |
ui.SemanticsAction.scrollRight.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2, 3]),
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2, 3]),
);
for (int id = 1; id <= 3; id++) {
updateNode(
builder,
id: id,
transform: Matrix4.translationValues(50.0 * id, 0, 0).toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 50, 50),
);
}
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle; touch-action: none; overflow-x: scroll">
<flt-semantics-scroll-overflow></flt-semantics-scroll-overflow>
<sem-c>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
final DomElement scrollable = findScrollable(owner());
expect(scrollable, isNotNull);
    // When there's more content than the available size the neutral scrollLeft
    // is greater than 0, with a maximum of 10 (or 9 on some browsers).
int browserMaxScrollDiff = 0;
// The max scroll value varies between `9` and `10` for Safari desktop
// browsers.
if (browserEngine == BrowserEngine.webkit &&
operatingSystem == OperatingSystem.macOs) {
browserMaxScrollDiff = 1;
}
expect(scrollable.scrollLeft >= (10 - browserMaxScrollDiff), isTrue);
Future<ui.SemanticsActionEvent> capturedEventFuture = captureSemanticsEvent();
scrollable.scrollLeft = 20;
expect(scrollable.scrollLeft, 20);
ui.SemanticsActionEvent capturedEvent = await capturedEventFuture;
expect(capturedEvent.nodeId, 0);
expect(capturedEvent.type, ui.SemanticsAction.scrollLeft);
expect(capturedEvent.arguments, isNull);
// Engine semantics returns scroll position back to neutral.
expect(scrollable.scrollLeft >= (10 - browserMaxScrollDiff), isTrue);
capturedEventFuture = captureSemanticsEvent();
scrollable.scrollLeft = 5;
capturedEvent = await capturedEventFuture;
expect(scrollable.scrollLeft >= (5 - browserMaxScrollDiff), isTrue);
expect(capturedEvent.nodeId, 0);
expect(capturedEvent.type, ui.SemanticsAction.scrollRight);
expect(capturedEvent.arguments, isNull);
    // Engine semantics returns scroll left back to neutral.
expect(scrollable.scrollLeft >= (10 - browserMaxScrollDiff), isTrue);
});
}
void _testIncrementables() {
test('renders a trivial incrementable node', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.increase.index,
value: 'd',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<input role="slider" aria-valuenow="1" aria-valuetext="d" aria-valuemax="1" aria-valuemin="1">
</sem>''');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.incrementable);
expect(
reason: 'Incrementables use custom focus management',
node.primaryRole!.debugSecondaryRoles,
isNot(contains(Role.focusable)),
);
semantics().semanticsEnabled = false;
});
test('increments', () async {
final SemanticsActionLogger logger = SemanticsActionLogger();
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.increase.index,
value: 'd',
increasedValue: 'e',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<input role="slider" aria-valuenow="1" aria-valuetext="d" aria-valuemax="2" aria-valuemin="1">
</sem>''');
final DomHTMLInputElement input =
owner().semanticsHost.querySelector('input')! as DomHTMLInputElement;
input.value = '2';
input.dispatchEvent(createDomEvent('Event', 'change'));
expect(await logger.idLog.first, 0);
expect(await logger.actionLog.first, ui.SemanticsAction.increase);
semantics().semanticsEnabled = false;
});
test('decrements', () async {
final SemanticsActionLogger logger = SemanticsActionLogger();
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.decrease.index,
value: 'd',
decreasedValue: 'c',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<input role="slider" aria-valuenow="1" aria-valuetext="d" aria-valuemax="1" aria-valuemin="0">
</sem>''');
final DomHTMLInputElement input =
owner().semanticsHost.querySelector('input')! as DomHTMLInputElement;
input.value = '0';
input.dispatchEvent(createDomEvent('Event', 'change'));
expect(await logger.idLog.first, 0);
expect(await logger.actionLog.first, ui.SemanticsAction.decrease);
semantics().semanticsEnabled = false;
});
test('renders a node that can both increment and decrement', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 |
ui.SemanticsAction.decrease.index |
ui.SemanticsAction.increase.index,
value: 'd',
increasedValue: 'e',
decreasedValue: 'c',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<input role="slider" aria-valuenow="1" aria-valuetext="d" aria-valuemax="2" aria-valuemin="0">
</sem>''');
semantics().semanticsEnabled = false;
});
test('sends focus events', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void pumpSemantics({ required bool isFocused }) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
hasIncrease: true,
isFocusable: true,
isFocused: isFocused,
hasEnabledState: true,
isEnabled: true,
value: 'd',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
}
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
pumpSemantics(isFocused: false);
final DomElement element = owner().debugSemanticsTree![0]!.element.querySelector('input')!;
expect(capturedActions, isEmpty);
pumpSemantics(isFocused: true);
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didGainAccessibilityFocus, null),
]);
capturedActions.clear();
pumpSemantics(isFocused: false);
expect(
reason: 'The engine never calls blur() explicitly.',
capturedActions,
isEmpty,
);
element.blur();
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didLoseAccessibilityFocus, null),
]);
semantics().semanticsEnabled = false;
});
}
void _testTextField() {
test('renders a text field', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 | ui.SemanticsFlag.isTextField.index,
value: 'hello',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<input value="hello" />
</sem>''');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.textField);
expect(
reason: 'Text fields use custom focus management',
node.primaryRole!.debugSecondaryRoles,
isNot(contains(Role.focusable)),
);
semantics().semanticsEnabled = false;
});
// TODO(yjbanov): this test will need to be adjusted for Safari when we add
// Safari testing.
test('sends a focus action when text field is activated', () async {
final SemanticsActionLogger logger = SemanticsActionLogger();
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.didGainAccessibilityFocus.index,
flags: 0 | ui.SemanticsFlag.isTextField.index,
value: 'hello',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
final DomElement textField =
owner().semanticsHost.querySelector('input[data-semantics-role="text-field"]')!;
expect(owner().semanticsHost.ownerDocument?.activeElement, isNot(textField));
textField.focus();
expect(owner().semanticsHost.ownerDocument?.activeElement, textField);
expect(await logger.idLog.first, 0);
expect(await logger.actionLog.first, ui.SemanticsAction.didGainAccessibilityFocus);
semantics().semanticsEnabled = false;
}, // TODO(yjbanov): https://github.com/flutter/flutter/issues/46638
// TODO(yjbanov): https://github.com/flutter/flutter/issues/50590
skip: browserEngine != BrowserEngine.blink);
}
void _testCheckables() {
test('renders a switched on switch element', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
label: 'test label',
flags: 0 |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.hasToggledState.index |
ui.SemanticsFlag.isToggled.index |
ui.SemanticsFlag.isFocusable.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem aria-label="test label" flt-tappable role="switch" aria-checked="true" style="$rootSemanticStyle"></sem>
''');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.checkable);
expect(
reason: 'Checkables use generic secondary roles',
node.primaryRole!.debugSecondaryRoles,
containsAll(<Role>[Role.focusable, Role.tappable]),
);
semantics().semanticsEnabled = false;
});
test('renders a switched on disabled switch element', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasToggledState.index |
ui.SemanticsFlag.isToggled.index |
ui.SemanticsFlag.hasEnabledState.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="switch" aria-disabled="true" aria-checked="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders a switched off switch element', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasToggledState.index |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="switch" flt-tappable aria-checked="false" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders a checked checkbox', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.isChecked.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="checkbox" flt-tappable aria-checked="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders a checked disabled checkbox', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.isChecked.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="checkbox" aria-disabled="true" aria-checked="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders an unchecked checkbox', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="checkbox" flt-tappable aria-checked="false" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders a checked radio button', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.isInMutuallyExclusiveGroup.index |
ui.SemanticsFlag.isChecked.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="radio" flt-tappable aria-checked="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders a checked disabled radio button', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.isInMutuallyExclusiveGroup.index |
ui.SemanticsFlag.isChecked.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="radio" aria-disabled="true" aria-checked="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders an unchecked radio button', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.isEnabled.index |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.hasCheckedState.index |
ui.SemanticsFlag.isInMutuallyExclusiveGroup.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="radio" flt-tappable aria-checked="false" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('sends focus events', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void pumpSemantics({ required bool isFocused }) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
// The following combination of actions and flags describes a checkbox.
hasTap: true,
hasEnabledState: true,
isEnabled: true,
hasCheckedState: true,
isFocusable: true,
isFocused: isFocused,
value: 'd',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
}
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
pumpSemantics(isFocused: false);
final DomElement element = owner().debugSemanticsTree![0]!.element;
expect(capturedActions, isEmpty);
pumpSemantics(isFocused: true);
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didGainAccessibilityFocus, null),
]);
capturedActions.clear();
// The framework removes focus from the widget (i.e. "blurs" it). Since the
// blurring is initiated by the framework, there's no need to send any
// notifications back to the framework about it.
pumpSemantics(isFocused: false);
expect(capturedActions, isEmpty);
// If the element is blurred by the browser, then we do want to notify the
// framework. This is because the screen reader can be focused on something
// other than what the framework is focused on, and notifying the framework
// about the loss of focus on a node is information that the framework did
// not have before.
element.blur();
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didLoseAccessibilityFocus, null),
]);
semantics().semanticsEnabled = false;
});
}
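// Tests semantics of tappable controls (buttons): enabled and disabled
// rendering, focus handling, and routing of click events to tap actions.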
void _testTappable() {
test('renders an enabled tappable widget', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
isFocusable: true,
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
expectSemanticsTree(owner(), '''
<sem role="button" flt-tappable style="$rootSemanticStyle"></sem>
''');
final SemanticsObject node = owner().debugSemanticsTree![0]!;
expect(node.primaryRole?.role, PrimaryRole.button);
expect(
node.primaryRole?.debugSecondaryRoles,
containsAll(<Role>[Role.focusable, Role.tappable]),
);
expect(tester.getSemanticsObject(0).element.tabIndex, 0);
semantics().semanticsEnabled = false;
});
test('renders a disabled tappable widget', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
actions: 0 | ui.SemanticsAction.tap.index,
flags: 0 |
ui.SemanticsFlag.hasEnabledState.index |
ui.SemanticsFlag.isButton.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="button" aria-disabled="true" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('can switch tappable between enabled and disabled', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void updateTappable({required bool enabled}) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
hasTap: true,
hasEnabledState: true,
isEnabled: enabled,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
}
updateTappable(enabled: false);
expectSemanticsTree(
owner(),
'<sem role="button" aria-disabled="true" style="$rootSemanticStyle"></sem>'
);
updateTappable(enabled: true);
expectSemanticsTree(
owner(),
'<sem role="button" flt-tappable style="$rootSemanticStyle"></sem>',
);
updateTappable(enabled: false);
expectSemanticsTree(
owner(),
'<sem role="button" aria-disabled="true" style="$rootSemanticStyle"></sem>',
);
updateTappable(enabled: true);
expectSemanticsTree(
owner(),
'<sem role="button" flt-tappable style="$rootSemanticStyle"></sem>',
);
semantics().semanticsEnabled = false;
});
test('focuses on tappable after element has been attached', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
expect(domDocument.activeElement, tester.getSemanticsObject(0).element);
semantics().semanticsEnabled = false;
});
test('sends focus events', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void pumpSemantics({ required bool isFocused }) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
// The following combination of actions and flags describes a button.
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: isFocused,
value: 'd',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
}
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
pumpSemantics(isFocused: false);
final DomElement element = owner().debugSemanticsTree![0]!.element;
expect(capturedActions, isEmpty);
pumpSemantics(isFocused: true);
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didGainAccessibilityFocus, null),
]);
capturedActions.clear();
pumpSemantics(isFocused: false);
expect(capturedActions, isEmpty);
element.blur();
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.didLoseAccessibilityFocus, null),
]);
semantics().semanticsEnabled = false;
});
// Regression test for: https://github.com/flutter/flutter/issues/134842
//
// If the click event is allowed to propagate through the hierarchy, then both
// the descendant and the parent will generate a SemanticsAction.tap, causing
// a double-tap to happen on the framework side.
test('inner tappable overrides ancestor tappable', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
isFocusable: true,
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
isFocusable: true,
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
],
);
tester.apply();
expectSemanticsTree(owner(), '''
<sem flt-tappable role="button" style="$rootSemanticStyle">
<sem-c>
<sem flt-tappable role="button"></sem>
</sem-c>
</sem>
''');
// Tap on the outer element
{
final DomElement element = tester.getSemanticsObject(0).element;
final DomRect rect = element.getBoundingClientRect();
element.dispatchEvent(createDomMouseEvent('click', <Object?, Object?>{
'clientX': (rect.left + (rect.right - rect.left) / 2).floor(),
'clientY': (rect.top + (rect.bottom - rect.top) / 2).floor(),
}));
expect(capturedActions, <CapturedAction>[
(0, ui.SemanticsAction.tap, null),
]);
}
// Tap on the inner element
{
capturedActions.clear();
final DomElement element = tester.getSemanticsObject(1).element;
final DomRect rect = element.getBoundingClientRect();
element.dispatchEvent(createDomMouseEvent('click', <Object?, Object?>{
'bubbles': true,
'clientX': (rect.left + (rect.right - rect.left) / 2).floor(),
'clientY': (rect.top + (rect.bottom - rect.top) / 2).floor(),
}));
// The click on the inner element should not propagate to the parent to
// avoid sending a second SemanticsAction.tap action to the framework.
expect(capturedActions, <CapturedAction>[
(1, ui.SemanticsAction.tap, null),
]);
}
semantics().semanticsEnabled = false;
});
}
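// Tests semantics of image nodes, with and without labels and child nodes.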
void _testImage() {
test('renders an image with no child nodes and with a label', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isImage.index,
label: 'Test Image Label',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="img" aria-label="Test Image Label" style="$rootSemanticStyle"></sem>
''');
semantics().semanticsEnabled = false;
});
test('renders an image with a child node and with a label', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isImage.index,
label: 'Test Image Label',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-img role="img" aria-label="Test Image Label">
</sem-img>
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
semantics().semanticsEnabled = false;
});
test('renders an image with no child nodes without a label', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isImage.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(
owner(),
'<sem role="img" style="$rootSemanticStyle"></sem>',
);
semantics().semanticsEnabled = false;
});
test('renders an image with a child node and without a label', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isImage.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(10, 10, 20, 20),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-img role="img">
</sem-img>
<sem-c>
<sem></sem>
</sem-c>
</sem>''');
semantics().semanticsEnabled = false;
});
}
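/// A mock [AccessibilityAnnouncements] that counts invocations of [announce]
/// instead of producing real ARIA live announcements.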
class MockAccessibilityAnnouncements implements AccessibilityAnnouncements {
int announceInvoked = 0;
@override
void announce(String message, Assertiveness assertiveness) {
announceInvoked += 1;
}
@override
DomHTMLElement ariaLiveElementFor(Assertiveness assertiveness) {
throw UnsupportedError(
'ariaLiveElementFor is not supported in MockAccessibilityAnnouncements');
}
@override
void handleMessage(StandardMessageCodec codec, ByteData? data) {
throw UnsupportedError(
'handleMessage is not supported in MockAccessibilityAnnouncements!');
}
}
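// Tests that live region nodes announce their labels through
// [AccessibilityAnnouncements], and only when the label actually changes.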
void _testLiveRegion() {
tearDown(() {
LiveRegion.debugOverrideAccessibilityAnnouncements(null);
});
test('announces the label after an update', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final MockAccessibilityAnnouncements mockAccessibilityAnnouncements =
MockAccessibilityAnnouncements();
LiveRegion.debugOverrideAccessibilityAnnouncements(mockAccessibilityAnnouncements);
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'This is a snackbar',
flags: 0 | ui.SemanticsFlag.isLiveRegion.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expect(mockAccessibilityAnnouncements.announceInvoked, 1);
semantics().semanticsEnabled = false;
});
test('does not announce anything if there is no label', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final MockAccessibilityAnnouncements mockAccessibilityAnnouncements =
MockAccessibilityAnnouncements();
LiveRegion.debugOverrideAccessibilityAnnouncements(mockAccessibilityAnnouncements);
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.isLiveRegion.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expect(mockAccessibilityAnnouncements.announceInvoked, 0);
semantics().semanticsEnabled = false;
});
test('does not announce the same label over and over', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final MockAccessibilityAnnouncements mockAccessibilityAnnouncements =
MockAccessibilityAnnouncements();
LiveRegion.debugOverrideAccessibilityAnnouncements(mockAccessibilityAnnouncements);
ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'This is a snackbar',
flags: 0 | ui.SemanticsFlag.isLiveRegion.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expect(mockAccessibilityAnnouncements.announceInvoked, 1);
builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'This is a snackbar',
flags: 0 | ui.SemanticsFlag.isLiveRegion.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expect(mockAccessibilityAnnouncements.announceInvoked, 1);
semantics().semanticsEnabled = false;
});
}
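// Tests semantics of platform view nodes: aria-owns wiring, hit-test
// transparency, and reachability of the platform view element itself.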
void _testPlatformView() {
test('sets and updates aria-owns', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
// Set.
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
platformViewId: 5,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(
owner(),
'<sem aria-owns="flt-pv-5" style="$rootSemanticStyle"></sem>',
);
}
// Update.
{
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
platformViewId: 42,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(
owner(),
'<sem aria-owns="flt-pv-42" style="$rootSemanticStyle"></sem>',
);
}
semantics().semanticsEnabled = false;
});
test('is transparent w.r.t. hit testing', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
platformViewId: 5,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(
owner(),
'<sem aria-owns="flt-pv-5" style="$rootSemanticStyle"></sem>',
);
final DomElement element = owner().semanticsHost.querySelector('flt-semantics')!;
expect(element.style.pointerEvents, 'none');
semantics().semanticsEnabled = false;
});
// This test simulates the scenario of three child semantic nodes contained by
// a common parent. The first and the last nodes are plain leaf nodes. The
// middle node is a platform view node. Nodes overlap. The test hit tests
// various points and verifies that the correct DOM element receives the
// event. The test does this using `documentOrShadow.elementFromPoint`, which,
// if browsers are to be trusted, should do the same thing as if a pointer
// event landed at the given location.
//
// 0px -------------
// | |
// | | <- plain semantic node
// | 1 |
// 15px | -------------
// | | |
// 25px --| |
// | 2 | <- platform view
// | |
// 35px | -------------
// | | |
// 45px --| |
// | 3 | <- plain semantic node
// | |
// | |
// 60px -------------
test('is reachable via a hit test', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
ui_web.platformViewRegistry.registerViewFactory(
'test-platform-view',
(int viewId) => createDomHTMLDivElement()
..id = 'view-0'
..style.width = '100%'
..style.height = '100%',
);
await createPlatformView(0, 'test-platform-view');
final ui.SceneBuilder sceneBuilder = ui.SceneBuilder();
sceneBuilder.addPlatformView(
0,
offset: const ui.Offset(0, 15),
width: 20,
height: 30,
);
await renderScene(sceneBuilder.build());
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
final double dpr = EngineFlutterDisplay.instance.devicePixelRatio;
updateNode(builder,
rect: const ui.Rect.fromLTRB(0, 0, 20, 60),
childrenInTraversalOrder: Int32List.fromList(<int>[1, 2, 3]),
childrenInHitTestOrder: Int32List.fromList(<int>[1, 2, 3]),
transform: Float64List.fromList(Matrix4.diagonal3Values(dpr, dpr, 1).storage));
updateNode(
builder,
id: 1,
rect: const ui.Rect.fromLTRB(0, 0, 20, 25),
);
updateNode(
builder,
id: 2,
// This has to match the values passed to `addPlatformView` above.
rect: const ui.Rect.fromLTRB(0, 15, 20, 45),
platformViewId: 0,
);
updateNode(
builder,
id: 3,
rect: const ui.Rect.fromLTRB(0, 35, 20, 60),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem style="z-index: 3"></sem>
<sem style="z-index: 2" aria-owns="flt-pv-0"></sem>
<sem style="z-index: 1"></sem>
</sem-c>
</sem>''');
final DomElement root = owner().semanticsHost.querySelector('#flt-semantic-node-0')!;
expect(root.style.pointerEvents, 'none');
final DomElement child1 =
owner().semanticsHost.querySelector('#flt-semantic-node-1')!;
expect(child1.style.pointerEvents, 'all');
final DomRect child1Rect = child1.getBoundingClientRect();
expect(child1Rect.left, 0);
expect(child1Rect.top, 0);
expect(child1Rect.right, 20);
expect(child1Rect.bottom, 25);
final DomElement child2 =
owner().semanticsHost.querySelector('#flt-semantic-node-2')!;
expect(child2.style.pointerEvents, 'none');
final DomRect child2Rect = child2.getBoundingClientRect();
expect(child2Rect.left, 0);
expect(child2Rect.top, 15);
expect(child2Rect.right, 20);
expect(child2Rect.bottom, 45);
final DomElement child3 =
owner().semanticsHost.querySelector('#flt-semantic-node-3')!;
expect(child3.style.pointerEvents, 'all');
final DomRect child3Rect = child3.getBoundingClientRect();
expect(child3Rect.left, 0);
expect(child3Rect.top, 35);
expect(child3Rect.right, 20);
expect(child3Rect.bottom, 60);
final DomElement platformViewElement =
platformViewsHost.querySelector('#view-0')!;
final DomRect platformViewRect =
platformViewElement.getBoundingClientRect();
expect(platformViewRect.left, 0);
expect(platformViewRect.top, 15);
expect(platformViewRect.right, 20);
expect(platformViewRect.bottom, 45);
// Hit test child 1
expect(domDocument.elementFromPoint(10, 10), child1);
// Hit test overlap between child 1 and 2
// TODO(yjbanov): this is a known limitation, see https://github.com/flutter/flutter/issues/101439
expect(domDocument.elementFromPoint(10, 20), child1);
// Hit test child 2
// Clicking at the location of the middle semantics node should allow the
// event to go through the semantic tree and hit the platform view. Since
// platform views are projected into the shadow DOM from outside the shadow
// root, it would be reachable both from the shadow root (by hitting the
// corresponding <slot> tag) and from the document (by hitting the platform
// view element itself).
// Browsers disagree about which element should be returned when hit testing
// a shadow root. However, they do agree when hit testing `document`.
//
// See:
// * https://github.com/w3c/csswg-drafts/issues/556
// * https://bugzilla.mozilla.org/show_bug.cgi?id=1502369
expect(domDocument.elementFromPoint(10, 30), platformViewElement);
// Hit test overlap between child 2 and 3
expect(domDocument.elementFromPoint(10, 40), child3);
// Hit test child 3
expect(domDocument.elementFromPoint(10, 50), child3);
semantics().semanticsEnabled = false;
});
}
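// Tests that nodes with both children and a label are rendered as ARIA groups.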
void _testGroup() {
test('nodes with children and labels use group role with aria label', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'this is a label for a group of elements',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="group" aria-label="this is a label for a group of elements" style="$rootSemanticStyle"><sem-c><sem></sem></sem-c></sem>
''');
semantics().semanticsEnabled = false;
});
}
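// Tests dialog (route) semantics: labeling via scopesRoute/namesRoute and
// auto-focus behavior when a dialog appears.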
void _testDialog() {
test('renders named and labeled routes', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'this is a dialog label',
flags: 0 | ui.SemanticsFlag.scopesRoute.index | ui.SemanticsFlag.namesRoute.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expectSemanticsTree(owner(), '''
<sem role="dialog" aria-label="this is a dialog label" style="$rootSemanticStyle"><sem-c><sem></sem></sem-c></sem>
''');
expect(
owner().debugSemanticsTree![0]!.primaryRole?.role,
PrimaryRole.dialog,
);
semantics().semanticsEnabled = false;
});
test('warns about missing label', () {
final List<String> warnings = <String>[];
printWarning = warnings.add;
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
flags: 0 | ui.SemanticsFlag.scopesRoute.index | ui.SemanticsFlag.namesRoute.index,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[1]),
childrenInTraversalOrder: Int32List.fromList(<int>[1]),
);
updateNode(
builder,
id: 1,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
owner().updateSemantics(builder.build());
expect(
warnings,
<String>[
'Semantic node 0 had both scopesRoute and namesRoute set, indicating a self-labelled dialog, but it is missing the label. A dialog should be labelled either by setting namesRoute on itself and providing a label, or by containing a child node with namesRoute that can describe it with its content.',
],
);
// But still sets the dialog role.
expectSemanticsTree(owner(), '''
<sem role="dialog" aria-label="" style="$rootSemanticStyle"><sem-c><sem></sem></sem-c></sem>
''');
expect(
owner().debugSemanticsTree![0]!.primaryRole?.role,
PrimaryRole.dialog,
);
semantics().semanticsEnabled = false;
});
test('dialog can be described by a descendant', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void pumpSemantics({ required String label }) {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 2,
namesRoute: true,
label: label,
),
],
),
],
);
tester.apply();
expectSemanticsTree(owner(), '''
<sem role="dialog" aria-describedby="flt-semantic-node-2" style="$rootSemanticStyle">
<sem-c>
<sem>
<sem-c>
<sem role="text">$label</sem>
</sem-c>
</sem>
</sem-c>
</sem>
''');
}
pumpSemantics(label: 'Dialog label');
expect(
owner().debugSemanticsTree![0]!.primaryRole?.role,
PrimaryRole.dialog,
);
expect(
owner().debugSemanticsTree![2]!.primaryRole?.role,
PrimaryRole.generic,
);
expect(
owner().debugSemanticsTree![2]!.primaryRole?.debugSecondaryRoles,
contains(Role.routeName),
);
pumpSemantics(label: 'Updated dialog label');
semantics().semanticsEnabled = false;
});
test('scopesRoute alone sets the dialog role with no label', () {
final List<String> warnings = <String>[];
printWarning = warnings.add;
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
);
tester.apply();
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle"></sem>
''');
expect(
owner().debugSemanticsTree![0]!.primaryRole?.role,
PrimaryRole.dialog,
);
expect(
owner().debugSemanticsTree![0]!.primaryRole?.secondaryRoleManagers,
isNot(contains(Role.routeName)),
);
semantics().semanticsEnabled = false;
});
test('namesRoute alone has no effect', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 2,
namesRoute: true,
label: 'Hello',
),
],
),
],
);
tester.apply();
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem>
<sem-c>
<sem role="text">Hello</sem>
</sem-c>
</sem>
</sem-c>
</sem>
''');
expect(
owner().debugSemanticsTree![0]!.primaryRole?.role,
PrimaryRole.generic,
);
expect(
owner().debugSemanticsTree![2]!.primaryRole?.debugSecondaryRoles,
contains(Role.routeName),
);
semantics().semanticsEnabled = false;
});
// Test the simple scenario of a dialog coming up and containing focusable
// descendants that are not initially focused. The expectation is that the
// first descendant will be auto-focused.
test('focuses on the first unfocused Focusable', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
// None of the children should have isFocused set to `true` to make
// sure that the auto-focus logic kicks in.
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 2,
label: 'Button 1',
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: false,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
tester.updateNode(
id: 3,
label: 'Button 2',
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: false,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
],
),
],
);
tester.apply();
expect(
capturedActions,
<CapturedAction>[
(2, ui.SemanticsAction.didGainAccessibilityFocus, null),
],
);
semantics().semanticsEnabled = false;
});
// Test the scenario of a dialog coming up and containing focusable
// descendants with one of them explicitly requesting focus. The expectation
// is that the dialog will not attempt to auto-focus on anything and let the
// respective descendant take focus.
test('does nothing if a descendant asks for focus explicitly', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 2,
label: 'Button 1',
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: false,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
tester.updateNode(
id: 3,
label: 'Button 2',
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
// Asked for focus explicitly.
isFocused: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
],
),
],
);
tester.apply();
expect(
capturedActions,
<CapturedAction>[
(3, ui.SemanticsAction.didGainAccessibilityFocus, null),
],
);
semantics().semanticsEnabled = false;
});
// Test the scenario of a dialog coming up and containing non-focusable
// descendants that can have a11y focus. The expectation is that the first
// descendant will be auto-focused, even if it's not input-focusable.
test('focuses on the first non-focusable descendant', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 2,
label: 'Heading',
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
tester.updateNode(
id: 3,
label: 'Click me!',
hasTap: true,
hasEnabledState: true,
isEnabled: true,
isButton: true,
isFocusable: true,
isFocused: false,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
],
),
],
);
tester.apply();
// The focused node is not focusable, so no notification is sent to the
// framework.
expect(capturedActions, isEmpty);
// However, the element should have gotten the focus.
final DomElement element = owner().debugSemanticsTree![2]!.element;
expect(element.tabIndex, -1);
expect(domDocument.activeElement, element);
semantics().semanticsEnabled = false;
});
// This mostly makes sure the engine doesn't crash if given a completely empty
// dialog trying to find something to focus on.
test('does nothing if nothing is focusable inside the dialog', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
scopesRoute: true,
transform: Matrix4.identity().toFloat64(),
);
tester.apply();
expect(capturedActions, isEmpty);
expect(domDocument.activeElement, domDocument.body);
semantics().semanticsEnabled = false;
});
}
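/// A semantics action event captured by tests, recorded as
/// (node id, action, arguments) from a [ui.SemanticsActionEvent].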
typedef CapturedAction = (int nodeId, ui.SemanticsAction action, Object? args);
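// Tests generic focus management: [AccessibilityFocusManager] behavior and the
// focusable secondary role on otherwise generic nodes.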
void _testFocusable() {
test('AccessibilityFocusManager can manage element focus', () async {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
void pumpSemantics() {
final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
updateNode(
builder,
label: 'Dummy root element',
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
childrenInHitTestOrder: Int32List.fromList(<int>[]),
childrenInTraversalOrder: Int32List.fromList(<int>[]),
);
owner().updateSemantics(builder.build());
}
final List<CapturedAction> capturedActions = <CapturedAction>[];
EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
capturedActions.add((event.nodeId, event.type, event.arguments));
};
expect(capturedActions, isEmpty);
final AccessibilityFocusManager manager = AccessibilityFocusManager(owner());
expect(capturedActions, isEmpty);
final DomElement element = createDomElement('test-element');
expect(element.tabIndex, -1);
domDocument.body!.append(element);
// Start managing element
manager.manage(1, element);
expect(element.tabIndex, 0);
expect(capturedActions, isEmpty);
expect(domDocument.activeElement, isNot(element));
// Request focus
manager.changeFocus(true);
pumpSemantics(); // triggers post-update callbacks
expect(domDocument.activeElement, element);
expect(capturedActions, <CapturedAction>[
(1, ui.SemanticsAction.didGainAccessibilityFocus, null),
]);
capturedActions.clear();
// Give up focus
manager.changeFocus(false);
pumpSemantics(); // triggers post-update callbacks
expect(capturedActions, isEmpty);
expect(domDocument.activeElement, element);
// Browser blurs the element
element.blur();
expect(domDocument.activeElement, isNot(element));
expect(capturedActions, <CapturedAction>[
(1, ui.SemanticsAction.didLoseAccessibilityFocus, null),
]);
capturedActions.clear();
// Request focus again
manager.changeFocus(true);
pumpSemantics(); // triggers post-update callbacks
expect(domDocument.activeElement, element);
expect(capturedActions, <CapturedAction>[
(1, ui.SemanticsAction.didGainAccessibilityFocus, null),
]);
capturedActions.clear();
// Double-request focus
manager.changeFocus(true);
pumpSemantics(); // triggers post-update callbacks
expect(domDocument.activeElement, element);
expect(
reason: 'Nothing should be sent to the framework on focus re-request.',
capturedActions, isEmpty);
capturedActions.clear();
// Stop managing
manager.stopManaging();
pumpSemantics(); // triggers post-update callbacks
expect(
reason: 'There should be no notification to the framework because the '
'framework should already know. Otherwise, it would not have '
'asked to stop managing the node.',
capturedActions,
isEmpty,
);
expect(domDocument.activeElement, element);
// Attempt to request focus when not managing an element.
element.blur();
manager.changeFocus(true);
pumpSemantics(); // triggers post-update callbacks
expect(
reason: 'Attempting to request focus on a node that is not managed should '
'not result in any notifications to the framework.',
capturedActions,
isEmpty,
);
expect(domDocument.activeElement, isNot(element));
semantics().semanticsEnabled = false;
});
test('applies generic Focusable role', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
{
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
transform: Matrix4.identity().toFloat64(),
children: <SemanticsNodeUpdate>[
tester.updateNode(
id: 1,
label: 'focusable text',
isFocusable: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
),
],
);
tester.apply();
}
expectSemanticsTree(owner(), '''
<sem style="$rootSemanticStyle">
<sem-c>
<sem role="text">focusable text</sem>
</sem-c>
</sem>
''');
final SemanticsObject node = owner().debugSemanticsTree![1]!;
expect(node.isFocusable, isTrue);
expect(
node.primaryRole?.role,
PrimaryRole.generic,
);
expect(
node.primaryRole?.debugSecondaryRoles,
contains(Role.focusable),
);
final DomElement element = node.element;
expect(domDocument.activeElement, isNot(element));
{
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 1,
label: 'test focusable',
isFocusable: true,
isFocused: true,
transform: Matrix4.identity().toFloat64(),
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
}
expect(domDocument.activeElement, element);
semantics().semanticsEnabled = false;
});
}
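// Tests that link nodes are rendered as anchor (<a>) elements.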
void _testLink() {
test('a node with isLink set to true creates an anchor tag', () {
semantics()
..debugOverrideTimestampFunction(() => _testTime)
..semanticsEnabled = true;
SemanticsObject pumpSemantics() {
final SemanticsTester tester = SemanticsTester(owner());
tester.updateNode(
id: 0,
isLink: true,
rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
);
tester.apply();
return tester.getSemanticsObject(0);
}
final SemanticsObject object = pumpSemantics();
expect(object.element.tagName.toLowerCase(), 'a');
    semantics().semanticsEnabled = false;
  });
}
/// A facade in front of [ui.SemanticsUpdateBuilder.updateNode] that
/// supplies default values for semantics attributes.
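///
/// A typical call in these tests looks like the following (a minimal sketch;
/// the flags, label, and rect vary per test):
///
/// ```dart
/// final ui.SemanticsUpdateBuilder builder = ui.SemanticsUpdateBuilder();
/// updateNode(
///   builder,
///   label: 'example',
///   rect: const ui.Rect.fromLTRB(0, 0, 100, 50),
/// );
/// owner().updateSemantics(builder.build());
/// ```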
void updateNode(
ui.SemanticsUpdateBuilder builder, {
int id = 0,
int flags = 0,
int actions = 0,
int maxValueLength = 0,
int currentValueLength = 0,
int textSelectionBase = 0,
int textSelectionExtent = 0,
int platformViewId = -1, // -1 means not a platform view
int scrollChildren = 0,
int scrollIndex = 0,
double scrollPosition = 0.0,
double scrollExtentMax = 0.0,
double scrollExtentMin = 0.0,
double elevation = 0.0,
double thickness = 0.0,
ui.Rect rect = ui.Rect.zero,
String identifier = '',
String label = '',
List<ui.StringAttribute> labelAttributes = const <ui.StringAttribute>[],
String hint = '',
List<ui.StringAttribute> hintAttributes = const <ui.StringAttribute>[],
String value = '',
List<ui.StringAttribute> valueAttributes = const <ui.StringAttribute>[],
String increasedValue = '',
List<ui.StringAttribute> increasedValueAttributes =
const <ui.StringAttribute>[],
String decreasedValue = '',
List<ui.StringAttribute> decreasedValueAttributes =
const <ui.StringAttribute>[],
String tooltip = '',
ui.TextDirection textDirection = ui.TextDirection.ltr,
Float64List? transform,
Int32List? childrenInTraversalOrder,
Int32List? childrenInHitTestOrder,
Int32List? additionalActions,
}) {
transform ??= Float64List.fromList(Matrix4.identity().storage);
childrenInTraversalOrder ??= Int32List(0);
childrenInHitTestOrder ??= Int32List(0);
additionalActions ??= Int32List(0);
builder.updateNode(
id: id,
flags: flags,
actions: actions,
maxValueLength: maxValueLength,
currentValueLength: currentValueLength,
textSelectionBase: textSelectionBase,
textSelectionExtent: textSelectionExtent,
platformViewId: platformViewId,
scrollChildren: scrollChildren,
scrollIndex: scrollIndex,
scrollPosition: scrollPosition,
scrollExtentMax: scrollExtentMax,
scrollExtentMin: scrollExtentMin,
elevation: elevation,
thickness: thickness,
rect: rect,
identifier: identifier,
label: label,
labelAttributes: labelAttributes,
hint: hint,
hintAttributes: hintAttributes,
value: value,
valueAttributes: valueAttributes,
increasedValue: increasedValue,
increasedValueAttributes: increasedValueAttributes,
decreasedValue: decreasedValue,
decreasedValueAttributes: decreasedValueAttributes,
tooltip: tooltip,
textDirection: textDirection,
transform: transform,
childrenInTraversalOrder: childrenInTraversalOrder,
childrenInHitTestOrder: childrenInHitTestOrder,
additionalActions: additionalActions,
);
}
const MethodCodec codec = StandardMethodCodec();
/// Sends a platform message to create a Platform View with the given id and viewType.
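///
/// The returned future completes when the platform message is acknowledged, so
/// callers typically await it before rendering a scene that embeds the view,
/// e.g. `await createPlatformView(0, 'test-platform-view');`.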
Future<void> createPlatformView(int id, String viewType) {
final Completer<void> completer = Completer<void>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/platform_views',
codec.encodeMethodCall(MethodCall(
'create',
<String, dynamic>{
'id': id,
'viewType': viewType,
},
)),
(dynamic _) => completer.complete(),
);
return completer.future;
}
| engine/lib/web_ui/test/engine/semantics/semantics_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/semantics/semantics_test.dart",
"repo_id": "engine",
"token_count": 42438
} | 312 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui show Size;
void main() {
internalBootstrapBrowserTest(() => doTests);
}
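// Tests for CustomElementDimensionsProvider, which measures a host element and
// reports its physical size, keyboard insets, and resize events.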
void doTests() {
final DomElement sizeSource = createDomElement('div')
..style.display = 'block';
group('computePhysicalSize', () {
late CustomElementDimensionsProvider provider;
setUp(() {
sizeSource
..style.width = '10px'
..style.height = '10px';
domDocument.body!.append(sizeSource);
provider = CustomElementDimensionsProvider(sizeSource);
});
tearDown(() {
provider.close(); // cleanup
sizeSource.remove();
});
test('returns physical size of element (logical size * dpr)', () {
const double dpr = 2.5;
const double logicalWidth = 50;
const double logicalHeight = 75;
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpr);
sizeSource
..style.width = '${logicalWidth}px'
..style.height = '${logicalHeight}px';
const ui.Size expected = ui.Size(logicalWidth * dpr, logicalHeight * dpr);
final ui.Size computed = provider.computePhysicalSize();
expect(computed, expected);
});
});
group('computeKeyboardInsets', () {
late CustomElementDimensionsProvider provider;
setUp(() {
sizeSource
..style.width = '10px'
..style.height = '10px';
domDocument.body!.append(sizeSource);
provider = CustomElementDimensionsProvider(sizeSource);
});
tearDown(() {
provider.close(); // cleanup
sizeSource.remove();
});
test('from viewport physical size (simulated keyboard) - always zero', () {
// Simulate a 100px tall keyboard showing...
const double dpr = 2.5;
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpr);
const double keyboardGap = 100;
final double physicalHeight =
(domWindow.visualViewport!.height! + keyboardGap) * dpr;
final ViewPadding computed =
provider.computeKeyboardInsets(physicalHeight, false);
expect(computed.top, 0);
expect(computed.right, 0);
expect(computed.bottom, 0);
expect(computed.left, 0);
});
});
group('onResize Stream', () {
late CustomElementDimensionsProvider provider;
setUp(() async {
sizeSource
..style.width = '10px'
..style.height = '10px';
domDocument.body!.append(sizeSource);
provider = CustomElementDimensionsProvider(sizeSource);
// Let the DOM settle before starting the test, so we don't get the first
// 10,10 Size in the test. Otherwise, the ResizeObserver may trigger
// unexpectedly after the test has started, and break our "first" result.
await Future<void>.delayed(const Duration(milliseconds: 250));
});
tearDown(() {
provider.close(); // cleanup
sizeSource.remove();
});
test('funnels resize events on sizeSource', () async {
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(2.7);
sizeSource
..style.width = '100px'
..style.height = '100px';
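      // 100 logical pixels * 2.7 device pixel ratio = 270 physical pixels.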
expect(provider.onResize.first, completes);
expect(provider.computePhysicalSize(), const ui.Size(270, 270));
sizeSource
..style.width = '200px'
..style.height = '200px';
expect(provider.onResize.first, completes);
expect(provider.computePhysicalSize(), const ui.Size(540, 540));
sizeSource
..style.width = '300px'
..style.height = '300px';
expect(provider.onResize.first, completes);
expect(provider.computePhysicalSize(), const ui.Size(810, 810));
});
test('funnels DPR change events too', () async {
// Override the source of DPR events...
final StreamController<double> dprController =
StreamController<double>.broadcast();
// Inject the dprController stream into the CustomElementDimensionsProvider.
final CustomElementDimensionsProvider provider =
CustomElementDimensionsProvider(
sizeSource,
onDprChange: dprController.stream,
);
// Set and broadcast the mock DPR value
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(3.2);
dprController.add(3.2);
expect(provider.onResize.first, completes);
expect(provider.computePhysicalSize(), const ui.Size(32, 32));
});
test('closed by onHotRestart', () async {
// Register an onDone listener for the stream
final Completer<bool> completer = Completer<bool>();
provider.onResize.listen(null, onDone: () {
completer.complete(true);
});
// Should close the stream
provider.close();
sizeSource
..style.width = '100px'
..style.height = '100px';
// Give time to the ResizeObserver to fire (if needed, it won't)
await Future<void>.delayed(const Duration(milliseconds: 100));
expect(provider.onResize.isEmpty, completion(isTrue));
expect(completer.future, completion(isTrue));
});
});
}
| engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/custom_element_dimensions_provider_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/custom_element_dimensions_provider_test.dart",
"repo_id": "engine",
"token_count": 1976
} | 313 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import 'package:web_engine_tester/golden_tester.dart';
import '../common/test_initialization.dart';
import 'paragraph/helper.dart';
DomElement get sceneHost =>
EnginePlatformDispatcher.instance.implicitView!.dom.renderingHost
.querySelector(DomManager.sceneHostTagName)!;
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
const Rect region = Rect.fromLTWH(0, 0, 500, 100);
late BitmapCanvas canvas;
void appendToScene() {
// Create a <flt-scene> element to make sure our CSS reset applies correctly.
final DomElement testScene = createDomElement('flt-scene');
if (isIosSafari) {
// Shrink to fit on the iPhone screen.
testScene.style.position = 'absolute';
testScene.style.transformOrigin = '0 0 0';
testScene.style.transform = 'scale(0.3)';
}
testScene.append(canvas.rootElement);
sceneHost.append(testScene);
}
setUpUnitTests(
withImplicitView: true,
emulateTesterEnvironment: false,
setUpTestViewDimensions: false,
);
tearDown(() {
sceneHost.querySelector('flt-scene')?.remove();
});
/// Draws several lines, some aligned precisely with the pixel grid, and some
/// that are offset by 0.5 vertically or horizontally.
///
/// The produced picture stresses the antialiasing generated by the browser
/// when positioning and rasterizing `<canvas>` tags. Aliasing artifacts can
/// be seen depending on pixel alignment and whether antialiasing happens
/// before or after rasterization.
void drawMisalignedLines(BitmapCanvas canvas) {
final SurfacePaintData linePaint = (SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1)
.paintData;
final SurfacePaintData fillPaint =
(SurfacePaint()..style = PaintingStyle.fill).paintData;
canvas.translate(10, 10);
canvas.drawRect(
const Rect.fromLTWH(0, 0, 40, 40),
linePaint,
);
canvas.drawLine(
const Offset(10, 0),
const Offset(10, 40),
linePaint,
);
canvas.drawLine(
const Offset(20.5, 0),
const Offset(20, 40),
linePaint,
);
canvas.drawCircle(const Offset(30, 10), 3, fillPaint);
canvas.drawCircle(const Offset(30.5, 30), 3, fillPaint);
}
test('renders pixels that are not aligned inside the canvas', () async {
canvas = BitmapCanvas(const Rect.fromLTWH(0, 0, 60, 60),
RenderStrategy());
drawMisalignedLines(canvas);
appendToScene();
await matchGoldenFile('misaligned_pixels_in_canvas_test.png', region: region);
});
test('compensates for misalignment of the canvas', () async {
// Notice the 0.5 offset in the bounds rectangle. It's what causes the
// misalignment of the canvas relative to the pixel grid. BitmapCanvas will
// shift its position back to 0.0 and at the same time it will
// compensate by shifting the contents of the canvas in the opposite
// direction.
canvas = BitmapCanvas(const Rect.fromLTWH(0.5, 0.5, 60, 60),
RenderStrategy());
canvas.clipRect(const Rect.fromLTWH(0, 0, 50, 50), ClipOp.intersect);
drawMisalignedLines(canvas);
appendToScene();
await matchGoldenFile('misaligned_canvas_test.png', region: region);
});
test('fill the whole canvas with color even when transformed', () async {
canvas = BitmapCanvas(const Rect.fromLTWH(0, 0, 50, 50),
RenderStrategy());
canvas.clipRect(const Rect.fromLTWH(0, 0, 50, 50), ClipOp.intersect);
canvas.translate(25, 25);
canvas.drawColor(const Color.fromRGBO(0, 255, 0, 1.0), BlendMode.src);
appendToScene();
await matchGoldenFile('bitmap_canvas_fills_color_when_transformed.png',
region: region);
});
test('fill the whole canvas with paint even when transformed', () async {
canvas = BitmapCanvas(const Rect.fromLTWH(0, 0, 50, 50),
RenderStrategy());
canvas.clipRect(const Rect.fromLTWH(0, 0, 50, 50), ClipOp.intersect);
canvas.translate(25, 25);
canvas.drawPaint(SurfacePaintData()
..color = const Color.fromRGBO(0, 255, 0, 1.0).value
..style = PaintingStyle.fill);
appendToScene();
await matchGoldenFile('bitmap_canvas_fills_paint_when_transformed.png',
region: region);
});
// This test reproduces text blurriness when two pieces of text appear inside
// two nested clips:
//
// ┌───────────────────────┐
// │ text in outer clip │
// │ ┌────────────────────┐│
// │ │ text in inner clip ││
// │ └────────────────────┘│
// └───────────────────────┘
//
// This test clips using canvas. See a similar test in `compositing_golden_test.dart`,
// which clips using layers.
//
// More details: https://github.com/flutter/flutter/issues/32274
test('renders clipped DOM text with high quality', () async {
final CanvasParagraph paragraph =
(ParagraphBuilder(ParagraphStyle(fontFamily: 'Roboto'))
..addText('Am I blurry?')).build() as CanvasParagraph;
paragraph.layout(const ParagraphConstraints(width: 1000));
final Rect canvasSize = Rect.fromLTRB(
0,
0,
paragraph.maxIntrinsicWidth + 16,
2 * paragraph.height + 32,
);
final Rect outerClip =
Rect.fromLTRB(0.5, 0.5, canvasSize.right, canvasSize.bottom);
final Rect innerClip = Rect.fromLTRB(0.5, canvasSize.bottom / 2 + 0.5,
canvasSize.right, canvasSize.bottom);
canvas = BitmapCanvas(canvasSize, RenderStrategy());
canvas.debugChildOverdraw = true;
canvas.clipRect(outerClip, ClipOp.intersect);
canvas.drawParagraph(paragraph, const Offset(8.5, 8.5));
canvas.clipRect(innerClip, ClipOp.intersect);
canvas.drawParagraph(paragraph, Offset(8.5, 8.5 + innerClip.top));
expect(
canvas.rootElement.querySelectorAll('flt-paragraph').map<String>((DomElement e) => e.innerText).toList(),
<String>['Am I blurry?', 'Am I blurry?'],
reason: 'Expected to render text using HTML',
);
appendToScene();
await matchGoldenFile(
'bitmap_canvas_draws_high_quality_text.png',
region: canvasSize,
);
}, testOn: 'chrome');
// NOTE: Chrome in --headless mode does not reproduce the bug that this test
// attempts to reproduce. However, it's still good to have this test
// for potential future regressions related to paint order.
test('draws text on top of canvas when transformed and clipped', () async {
final ParagraphBuilder builder = ParagraphBuilder(ParagraphStyle(
fontFamily: 'Ahem',
fontSize: 18,
));
const String text = 'This text is intentionally very long to make sure that it '
'breaks into multiple lines.';
builder.addText(text);
final CanvasParagraph paragraph = builder.build() as CanvasParagraph;
paragraph.layout(const ParagraphConstraints(width: 100));
final Rect canvasSize = Offset.zero & const Size(500, 500);
canvas = BitmapCanvas(canvasSize, RenderStrategy());
canvas.debugChildOverdraw = true;
final SurfacePaintData pathPaint = SurfacePaintData()
..color = 0xFF7F7F7F
..style = PaintingStyle.fill;
const double r = 200.0;
const double l = 50.0;
final Path path = (Path()
..moveTo(-l, -l)
..lineTo(0, -r)
..lineTo(l, -l)
..lineTo(r, 0)
..lineTo(l, l)
..lineTo(0, r)
..lineTo(-l, l)
..lineTo(-r, 0)
..close()).shift(const Offset(250, 250));
final SurfacePaintData borderPaint = SurfacePaintData()
..color = black.value
..style = PaintingStyle.stroke;
canvas.drawPath(path, pathPaint);
canvas.drawParagraph(paragraph, const Offset(180, 50));
canvas.drawRect(Rect.fromLTWH(180, 50, paragraph.width, paragraph.height), borderPaint);
expect(
canvas.rootElement.querySelectorAll('flt-paragraph').map<String?>((DomElement e) => e.text).toList(),
<String>[text],
reason: 'Expected to render text using HTML',
);
final SceneBuilder sb = SceneBuilder();
sb.pushTransform(Matrix4.diagonal3Values(EngineFlutterDisplay.instance.browserDevicePixelRatio,
EngineFlutterDisplay.instance.browserDevicePixelRatio, 1.0).toFloat64());
sb.pushTransform(Matrix4.rotationZ(math.pi / 2).toFloat64());
sb.pushOffset(0, -500);
sb.pushClipRect(canvasSize);
sb.pop();
sb.pop();
sb.pop();
sb.pop();
final SurfaceScene scene = sb.build() as SurfaceScene;
final DomElement sceneElement = scene.webOnlyRootElement!;
if (isIosSafari) {
// Shrink to fit on the iPhone screen.
sceneElement.style.position = 'absolute';
sceneElement.style.transformOrigin = '0 0 0';
sceneElement.style.transform = 'scale(0.3)';
}
sceneElement.querySelector('flt-clip')!.append(canvas.rootElement);
sceneHost.append(sceneElement);
await matchGoldenFile(
'bitmap_canvas_draws_text_on_top_of_canvas.png',
region: canvasSize,
);
});
// Regression test for https://github.com/flutter/flutter/issues/96498. When
// a picture is made of just text that can be rendered using plain HTML,
// BitmapCanvas should not create any <canvas> elements as they are expensive.
test('does not allocate bitmap canvas just for text', () async {
canvas = BitmapCanvas(const Rect.fromLTWH(0, 0, 50, 50), RenderStrategy());
final ParagraphBuilder builder = ParagraphBuilder(ParagraphStyle(fontFamily: 'Roboto'));
builder.addText('Hello');
final CanvasParagraph paragraph = builder.build() as CanvasParagraph;
paragraph.layout(const ParagraphConstraints(width: 1000));
canvas.drawParagraph(paragraph, const Offset(8.5, 8.5));
expect(
canvas.rootElement.querySelectorAll('canvas'),
isEmpty,
);
expect(
canvas.rootElement.querySelectorAll('flt-paragraph').single.innerText,
'Hello',
);
});
}
| engine/lib/web_ui/test/html/bitmap_canvas_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/bitmap_canvas_golden_test.dart",
"repo_id": "engine",
"token_count": 3747
} | 314 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import '../../common/test_initialization.dart';
import '../screenshot.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests(
emulateTesterEnvironment: false,
setUpTestViewDimensions: false,
);
setUp(() async {
debugShowClipLayers = true;
SurfaceSceneBuilder.debugForgetFrameScene();
});
tearDown(() {
for (final DomNode scene in domDocument.querySelectorAll('flt-scene')) {
scene.remove();
}
});
test('drawColor should cover entire viewport', () async {
const Rect region = Rect.fromLTWH(0, 0, 400, 400);
final SurfaceSceneBuilder builder = SurfaceSceneBuilder();
final Picture testPicture = _drawTestPicture(region, useColor: true);
builder.addPicture(Offset.zero, testPicture);
await sceneScreenshot(builder, 'canvas_draw_color', region: region);
}, skip: true); // TODO(ferhat): matchGolden fails when a div covers viewport.
test('drawPaint should cover entire viewport', () async {
const Rect region = Rect.fromLTWH(0, 0, 400, 400);
final SurfaceSceneBuilder builder = SurfaceSceneBuilder();
final Picture testPicture = _drawTestPicture(region);
builder.addPicture(Offset.zero, testPicture);
await sceneScreenshot(builder, 'canvas_draw_paint', region: region);
}, skip: true); // TODO(ferhat): matchGolden fails when a div covers viewport.
}
Picture _drawTestPicture(Rect region, {bool useColor = false}) {
final EnginePictureRecorder recorder = PictureRecorder() as EnginePictureRecorder;
const Rect r = Rect.fromLTWH(0, 0, 200, 200);
final RecordingCanvas canvas = recorder.beginRecording(r);
canvas.drawRect(
region.deflate(8.0),
Paint() as SurfacePaint
..style = PaintingStyle.fill
..color = const Color(0xFFE0E0E0)
);
canvas.transform(Matrix4.translationValues(50, 50, 0).storage);
if (useColor) {
canvas.drawColor(const Color.fromRGBO(0, 255, 0, 1), BlendMode.srcOver);
} else {
canvas.drawPaint(Paint() as SurfacePaint
..style = PaintingStyle.fill
..color = const Color.fromRGBO(0, 0, 255, 1));
}
canvas.drawCircle(
Offset(r.width/2, r.height/2), r.width/2,
Paint() as SurfacePaint
..style = PaintingStyle.fill
..color = const Color.fromRGBO(255, 0, 0, 1));
return recorder.endRecording();
}
| engine/lib/web_ui/test/html/drawing/canvas_draw_color_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/drawing/canvas_draw_color_golden_test.dart",
"repo_id": "engine",
"token_count": 914
} | 315 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide window;
import '../../common/test_initialization.dart';
import 'helper.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests(
withImplicitView: true,
emulateTesterEnvironment: false,
setUpTestViewDimensions: false,
);
void testEllipsis(EngineCanvas canvas) {
Offset offset = Offset.zero;
CanvasParagraph paragraph;
const double fontSize = 22.0;
const double width = 126.0;
const double padding = 20.0;
final SurfacePaintData borderPaint = SurfacePaintData()
..color = black.value
..style = PaintingStyle.stroke;
paragraph = rich(
EngineParagraphStyle(fontFamily: 'Roboto', fontSize: fontSize, ellipsis: '...'),
(CanvasParagraphBuilder builder) {
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('Lorem ');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('ipsum');
},
)..layout(constrain(width));
canvas.drawParagraph(paragraph, offset);
canvas.drawRect(Rect.fromLTWH(offset.dx, offset.dy, width, paragraph.height), borderPaint);
offset = offset.translate(0, paragraph.height + padding);
paragraph = rich(
EngineParagraphStyle(fontFamily: 'Roboto', fontSize: fontSize, ellipsis: '...'),
(CanvasParagraphBuilder builder) {
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('Lorem\n');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('ipsum ');
builder.pushStyle(EngineTextStyle.only(color: red));
builder.addText('dolor sit');
},
)..layout(constrain(width));
canvas.drawParagraph(paragraph, offset);
canvas.drawRect(Rect.fromLTWH(offset.dx, offset.dy, width, paragraph.height), borderPaint);
offset = offset.translate(0, paragraph.height + padding);
paragraph = rich(
EngineParagraphStyle(fontFamily: 'Roboto', fontSize: fontSize, ellipsis: '...'),
(CanvasParagraphBuilder builder) {
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('Lorem\n');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('ipsum ');
builder.pushStyle(EngineTextStyle.only(color: red));
builder.addText('d');
builder.pushStyle(EngineTextStyle.only(color: black));
builder.addText('o');
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('l');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('o');
builder.pushStyle(EngineTextStyle.only(color: red));
builder.addText('r');
builder.pushStyle(EngineTextStyle.only(color: black));
builder.addText(' ');
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('s');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('i');
builder.pushStyle(EngineTextStyle.only(color: red));
builder.addText('t');
},
)..layout(constrain(width));
canvas.drawParagraph(paragraph, offset);
canvas.drawRect(Rect.fromLTWH(offset.dx, offset.dy, width, paragraph.height), borderPaint);
offset = offset.translate(0, paragraph.height + padding);
paragraph = rich(
EngineParagraphStyle(fontFamily: 'Roboto', fontSize: fontSize, maxLines: 2, ellipsis: '...'),
(CanvasParagraphBuilder builder) {
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('Lorem');
builder.pushStyle(EngineTextStyle.only(color: green));
builder.addText('ipsu');
builder.pushStyle(EngineTextStyle.only(color: red));
builder.addText('mdolor');
builder.pushStyle(EngineTextStyle.only(color: black));
builder.addText('sit');
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('amet');
builder.pushStyle(EngineTextStyle.only(color: blue));
builder.addText('consectetur');
},
)..layout(constrain(width));
canvas.drawParagraph(paragraph, offset);
canvas.drawRect(Rect.fromLTWH(offset.dx, offset.dy, width, paragraph.height), borderPaint);
offset = offset.translate(0, paragraph.height + padding);
}
test('ellipsis', () {
const Rect bounds = Rect.fromLTWH(0, 0, 300, 300);
final EngineCanvas canvas = BitmapCanvas(bounds, RenderStrategy());
testEllipsis(canvas);
return takeScreenshot(canvas, bounds, 'canvas_paragraph_ellipsis');
});
test('ellipsis (dom)', () {
const Rect bounds = Rect.fromLTWH(0, 0, 300, 300);
final EngineCanvas canvas = DomCanvas(domDocument.createElement('flt-picture'));
testEllipsis(canvas);
return takeScreenshot(canvas, bounds, 'canvas_paragraph_ellipsis_dom');
});
}
| engine/lib/web_ui/test/html/paragraph/overflow_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/paragraph/overflow_golden_test.dart",
"repo_id": "engine",
"token_count": 1955
} | 316 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import '../../common/test_initialization.dart';
import '../screenshot.dart';
// TODO(yjbanov): unskip Firefox tests when Firefox implements WebGL in headless mode.
// https://github.com/flutter/flutter/issues/86623
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
const double screenWidth = 600.0;
const double screenHeight = 800.0;
const Rect screenRect = Rect.fromLTWH(0, 0, screenWidth, screenHeight);
const Rect region = Rect.fromLTWH(0, 0, 500, 240);
setUpUnitTests(withImplicitView: true);
test('Paints sweep gradient rectangles', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
GradientSweep sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
null);
final GradientSweep sweepGradientRotated = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
Matrix4.rotationZ(math.pi / 6.0).storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with shifted center and rotation.
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradientRotated, Rect.fromLTWH(rectBounds.center.dx, rectBounds.top, rectBounds.width / 2, rectBounds.height)));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with start/end angle.
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
math.pi / 6, 3 * math.pi / 4,
null);
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode repeat
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.repeated,
math.pi / 6, 3 * math.pi / 4,
null);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode mirror
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.mirror,
math.pi / 6, 3 * math.pi / 4,
null);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'sweep_gradient_rect', canvasRect: screenRect, region: region);
}, skip: isFirefox);
test('Paints sweep gradient ovals', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),];
final List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
GradientSweep sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
null);
final GradientSweep sweepGradientRotated = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
Matrix4.rotationZ(math.pi / 6.0).storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawOval(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with shifted center and rotation.
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
canvas.drawOval(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradientRotated, Rect.fromLTWH(rectBounds.center.dx, rectBounds.top, rectBounds.width / 2, rectBounds.height)));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with start/end angle.
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
math.pi / 6, 3 * math.pi / 4,
null);
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
canvas.drawOval(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode repeat
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.repeated,
math.pi / 6, 3 * math.pi / 4,
null);
canvas.drawOval(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode mirror
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.mirror,
math.pi / 6, 3 * math.pi / 4,
null);
canvas.drawOval(rectBounds,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'sweep_gradient_oval', canvasRect: screenRect, region: region);
}, skip: isFirefox);
test('Paints sweep gradient paths', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
GradientSweep sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
null);
final GradientSweep sweepGradientRotated = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
0, 360.0 / 180.0 * math.pi,
Matrix4.rotationZ(math.pi / 6.0).storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
Path path = samplePathFromRect(rectBounds);
canvas.drawPath(path,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with shifted center and rotation.
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
path = samplePathFromRect(rectBounds);
canvas.drawPath(path,
SurfacePaint()..shader = engineGradientToShader(sweepGradientRotated, Rect.fromLTWH(rectBounds.center.dx, rectBounds.top, rectBounds.width / 2, rectBounds.height)));
canvas.drawRect(rectBounds, borderPaint);
// Gradient with start/end angle.
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.clamp,
math.pi / 6, 3 * math.pi / 4,
null);
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
path = samplePathFromRect(rectBounds);
canvas.drawPath(path,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode repeat
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.repeated,
math.pi / 6, 3 * math.pi / 4,
null);
path = samplePathFromRect(rectBounds);
canvas.drawPath(path,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode mirror
rectBounds = rectBounds.translate(kBoxWidth + 10, 0);
sweepGradient = GradientSweep(const Offset(0.5, 0.5),
colors, stops, TileMode.mirror,
math.pi / 6, 3 * math.pi / 4,
null);
path = samplePathFromRect(rectBounds);
canvas.drawPath(path,
SurfacePaint()..shader = engineGradientToShader(sweepGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'sweep_gradient_path', canvasRect: screenRect, region: region);
}, skip: isFirefox);
/// Regression test for https://github.com/flutter/flutter/issues/74137.
test('Paints rotated and shifted linear gradient', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
GradientLinear linearGradient = GradientLinear(const Offset(50, 50),
const Offset(200,130),
colors, stops, TileMode.clamp,
Matrix4.identity().storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Tile mode repeat
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
linearGradient = GradientLinear(const Offset(50, 50),
const Offset(200,130),
colors, stops, TileMode.repeated,
Matrix4.identity().storage);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'linear_gradient_rect_shifted', canvasRect: screenRect, region: region);
}, skip: isFirefox);
/// Regression test for https://github.com/flutter/flutter/issues/82748.
test('Paints gradient with gradient stop outside range', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38)];
const List<double> stops = <double>[0.0, 10.0];
final GradientLinear linearGradient = GradientLinear(const Offset(50, 50),
const Offset(200,130),
colors, stops, TileMode.clamp,
Matrix4.identity().storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
const Rect rectBounds = Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
final EngineCanvas engineCanvas = BitmapCanvas(screenRect,
RenderStrategy());
canvas.endRecording();
canvas.apply(engineCanvas, screenRect);
}, skip: isFirefox);
test("Creating lots of gradients doesn't create too many webgl contexts",
() async {
final DomCanvasElement sideCanvas =
createDomCanvasElement(width: 5, height: 5);
final DomCanvasRenderingContextWebGl? context =
sideCanvas.getContext('webgl') as DomCanvasRenderingContextWebGl?;
expect(context, isNotNull);
final EngineCanvas engineCanvas =
BitmapCanvas(const Rect.fromLTRB(0, 0, 100, 100), RenderStrategy());
for (double x = 0; x < 100; x += 10) {
for (double y = 0; y < 100; y += 10) {
const List<Color> colors = <Color>[
Color(0xFFFF0000),
Color(0xFF0000FF),
];
final GradientLinear linearGradient = GradientLinear(
Offset.zero,
const Offset(10, 10),
colors,
null,
TileMode.clamp,
Matrix4.identity().storage);
engineCanvas.drawRect(Rect.fromLTWH(x, y, 10, 10),
SurfacePaintData()..shader = linearGradient);
}
}
expect(context!.isContextLost(), isFalse);
}, skip: isFirefox);
test('Paints clamped, rotated and shifted linear gradient', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
GradientLinear linearGradient = GradientLinear(const Offset(50, 50),
const Offset(200,130),
colors, stops, TileMode.clamp,
Matrix4.identity().storage);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
// Gradient with default center.
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
// Second gradient, also TileMode.clamp.
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
linearGradient = GradientLinear(const Offset(50, 50),
const Offset(200,130),
colors, stops, TileMode.clamp,
Matrix4.identity().storage);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'linear_gradient_rect_clamp_rotated', canvasRect: screenRect, region: region);
}, skip: isFirefox);
test('Paints linear gradient properly when within svg context', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 240));
canvas.save();
canvas.renderStrategy.isInsideSvgFilterTree = true;
final SurfacePaint borderPaint = SurfacePaint()
..style = PaintingStyle.stroke
..strokeWidth = 1
..color = const Color(0xFF000000);
const List<Color> colors = <Color>[
Color(0xFFFF0000),
Color(0xFF0000FF),
];
final GradientLinear linearGradient = GradientLinear(const Offset(125, 75),
const Offset(175, 125),
colors, null, TileMode.clamp,
Matrix4.identity().storage);
const double kBoxWidth = 150;
const double kBoxHeight = 100;
// Gradient with default center.
const Rect rectBounds = Rect.fromLTWH(100, 50, kBoxWidth, kBoxHeight);
canvas.drawRect(rectBounds,
SurfacePaint()..shader = engineLinearGradientToShader(linearGradient, rectBounds));
canvas.drawRect(rectBounds, borderPaint);
canvas.restore();
await canvasScreenshot(canvas, 'linear_gradient_in_svg_context', canvasRect: screenRect, region: region);
}, skip: isFirefox);
test('Paints transformed linear gradient', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),
];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
final Matrix4 transform = Matrix4.identity()
..translate(50, 50)
..scale(0.3, 0.7)
..rotateZ(0.5);
final GradientLinear linearGradient = GradientLinear(
const Offset(5, 5),
const Offset(200, 130),
colors,
stops,
TileMode.clamp,
transform.storage,
);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(
rectBounds,
SurfacePaint()
..shader = engineLinearGradientToShader(linearGradient, rectBounds),
);
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
canvas.drawOval(
rectBounds,
SurfacePaint()
..shader = engineLinearGradientToShader(linearGradient, rectBounds),
);
canvas.restore();
await canvasScreenshot(
canvas,
'linear_gradient_clamp_transformed',
canvasRect: screenRect,
region: region,
);
}, skip: isFirefox);
test('Paints transformed sweep gradient', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),
];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
final Matrix4 transform = Matrix4.identity()
..translate(100, 150)
..scale(0.3, 0.7)
..rotateZ(0.5);
final GradientSweep sweepGradient = GradientSweep(
const Offset(0.5, 0.5),
colors,
stops,
TileMode.clamp,
0.0,
2 * math.pi,
transform.storage,
);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(
rectBounds,
SurfacePaint()
..shader = engineGradientToShader(sweepGradient, rectBounds),
);
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
canvas.drawOval(
rectBounds,
SurfacePaint()
..shader = engineGradientToShader(sweepGradient, rectBounds),
);
canvas.restore();
await canvasScreenshot(
canvas,
'sweep_gradient_clamp_transformed',
canvasRect: screenRect,
region: region,
);
}, skip: isFirefox);
test('Paints transformed radial gradient', () async {
final RecordingCanvas canvas =
RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
canvas.save();
const List<Color> colors = <Color>[
Color(0xFF000000),
Color(0xFFFF3C38),
Color(0xFFFF8C42),
Color(0xFFFFF275),
Color(0xFF6699CC),
Color(0xFF656D78),
];
const List<double> stops = <double>[0.0, 0.05, 0.4, 0.6, 0.9, 1.0];
final Matrix4 transform = Matrix4.identity()
..translate(50, 50)
..scale(0.3, 0.7)
..rotateZ(0.5);
final GradientRadial radialGradient = GradientRadial(
const Offset(0.5, 0.5),
400,
colors,
stops,
TileMode.clamp,
transform.storage,
);
const double kBoxWidth = 150;
const double kBoxHeight = 80;
Rect rectBounds = const Rect.fromLTWH(10, 20, kBoxWidth, kBoxHeight);
canvas.drawRect(
rectBounds,
SurfacePaint()
..shader = engineRadialGradientToShader(radialGradient, rectBounds),
);
rectBounds = const Rect.fromLTWH(10, 110, kBoxWidth, kBoxHeight);
canvas.drawOval(
rectBounds,
SurfacePaint()
..shader = engineRadialGradientToShader(radialGradient, rectBounds),
);
canvas.restore();
await canvasScreenshot(
canvas,
'radial_gradient_clamp_transformed',
canvasRect: screenRect,
region: region,
);
}, skip: isFirefox);
}
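// The helpers below convert engine gradient descriptions into ui.Gradient
// shaders anchored to a destination rect. GradientSweep and GradientRadial
// store their center in coordinates normalized to that rect, so the mapping
// looks like this (illustrative numbers, not taken from the tests above):
//
//   // center (0.5, 0.5) over Rect.fromLTWH(10, 20, 150, 80)
//   // => Offset(10 + 0.5 * 150, 20 + 0.5 * 80) == Offset(85, 60)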
Shader engineGradientToShader(GradientSweep gradient, Rect rect) {
return Gradient.sweep(
Offset(rect.left + gradient.center.dx * rect.width,
rect.top + gradient.center.dy * rect.height),
gradient.colors, gradient.colorStops, gradient.tileMode,
gradient.startAngle,
gradient.endAngle,
gradient.matrix4 == null ? null :
Float64List.fromList(gradient.matrix4!),
);
}
Shader engineLinearGradientToShader(GradientLinear gradient, Rect rect) {
return Gradient.linear(gradient.from, gradient.to,
gradient.colors, gradient.colorStops, gradient.tileMode,
gradient.matrix4 == null ? null : Float64List.fromList(
gradient.matrix4!.matrix),
);
}
Shader engineRadialGradientToShader(GradientRadial gradient, Rect rect) {
return Gradient.radial(
Offset(rect.left + gradient.center.dx * rect.width,
rect.top + gradient.center.dy * rect.height),
gradient.radius,
gradient.colors,
gradient.colorStops,
gradient.tileMode,
gradient.matrix4 == null ? null : Float64List.fromList(gradient.matrix4!),
);
}
Path samplePathFromRect(Rect rectBounds) =>
Path()
..moveTo(rectBounds.center.dx, rectBounds.top)
..lineTo(rectBounds.left, rectBounds.bottom)
..quadraticBezierTo(rectBounds.center.dx + 20, rectBounds.bottom - 40,
rectBounds.right, rectBounds.bottom)
..close();
| engine/lib/web_ui/test/html/shaders/gradient_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/shaders/gradient_golden_test.dart",
"repo_id": "engine",
"token_count": 9344
} | 317 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Parses raw test data into a list of [TestCase] objects.
List<TestCase> parseRawTestData(String rawTestData, {required bool isV8}) {
return rawTestData
.split('\n')
.where(isValidTestCase)
.map((String line) => _checkReplacement(line, isV8: isV8))
.map(_parse)
.toList();
}
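// The raw data is expected to follow the shape of the Unicode line break test
// files: one case per line, with a sign/character sequence, a '#', and a
// human-readable explanation. A hand-written illustration (not copied from the
// real data files):
//
//   × 0041 × 0020 ÷ # × [0.3] LATIN CAPITAL LETTER A (AL) × [7.01] SPACE (SP) ÷ [0.3]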
bool isValidTestCase(String line) {
return line.startsWith('×');
}
String _checkReplacement(String line, {required bool isV8}) {
String replacement = line;
// Special cases for rules LB8, LB11, LB13, LB14, LB15, LB16, LB17 to allow
// line breaks after spaces.
final RegExp spacesRegex = RegExp(r'SPACE \(SP\) × \[(8|11|13|14|15|16|17)\.');
if (replacement.contains(spacesRegex)) {
replacement = replacement
.replaceAll('0020 ×', '0020 ÷') // SPACE (SP)
.replaceAllMapped(spacesRegex, (Match m) => 'SPACE (SP) ÷ [${m.group(1)}.');
}
if (!isV8) {
// Some test cases contradict rule LB25, so we are fixing them with the few
// regexes below.
final RegExp lb25Regex1 = RegExp(r'\((CP_CP30|CL)\)(.*?) ÷ \[999\.0\] (PERCENT|DOLLAR)');
if (replacement.contains(lb25Regex1)) {
replacement = replacement
.replaceAll(' ÷ 0024', ' × 0024') // DOLLAR SIGN (PR)
.replaceAll(' ÷ 0025', ' × 0025') // PERCENT SIGN (PO)
.replaceAllMapped(
lb25Regex1,
(Match m) => '(${m.group(1)})${m.group(2)} × [999.0] ${m.group(3)}',
);
}
final RegExp lb25Regex2 = RegExp(r'\((IS|SY)\)(.*?) ÷ \[999\.0\] (DIGIT)');
if (replacement.contains(lb25Regex2)) {
replacement = replacement
.replaceAll(' ÷ 0030', ' × 0030') // DIGIT ZERO (NU)
.replaceAllMapped(
lb25Regex2,
(Match m) => '(${m.group(1)})${m.group(2)} × [999.0] ${m.group(3)}',
);
}
final RegExp lb25Regex3 = RegExp(r'\((PR|PO)\)(.*?) ÷ \[999\.0\] (LEFT)');
if (replacement.contains(lb25Regex3)) {
replacement = replacement
.replaceAll(' ÷ 0028', ' × 0028') // LEFT PARENTHESIS (OP_OP30)
.replaceAll(' ÷ 007B', ' × 007B') // LEFT CURLY BRACKET (OP_OP30)
.replaceAll(' ÷ 2329', ' × 2329') // LEFT-POINTING ANGLE BRACKET (OP)
.replaceAllMapped(
lb25Regex3,
(Match m) => '(${m.group(1)})${m.group(2)} × [999.0] ${m.group(3)}',
);
}
}
if (isV8) {
// v8BreakIterator deviates from the spec around Hiragana and Katakana
// letters.
final RegExp hiragana21Regex = RegExp(r' × \[21\.03\] (HIRAGANA LETTER|KATAKANA LETTER|KATAKANA-HIRAGANA)');
if (replacement.contains(hiragana21Regex) && !replacement.contains('(BB)') && !replacement.contains('(PR)')) {
replacement = replacement
.replaceAll(' × 3041', ' ÷ 3041') // HIRAGANA LETTER (CJ)
.replaceAll(' × 30E5', ' ÷ 30E5') // KATAKANA LETTER (CJ)
.replaceAll(' × 30FC', ' ÷ 30FC') // KATAKANA-HIRAGANA PROLONGED SOUND MARK (CJ)
.replaceAllMapped(
hiragana21Regex,
(Match m) => ' ÷ [21.03] ${m.group(1)}',
);
}
if (replacement.contains(' × [16.0] HIRAGANA LETTER')) {
replacement = replacement
.replaceAll(' × 3041', ' ÷ 3041') // HIRAGANA LETTER (CJ)
.replaceAll(
' × [16.0] HIRAGANA LETTER',
' ÷ [16.0] HIRAGANA LETTER',
);
}
final RegExp hiraganaPercentRegex = RegExp(r'HIRAGANA .*? ÷ \[999\.0\] PERCENT');
if (replacement.contains(hiraganaPercentRegex)) {
replacement = replacement
.replaceAll(' ÷ 0025', ' × 0025') // PERCENT SIGN (PO)
.replaceAll(
' ÷ [999.0] PERCENT',
' × [999.0] PERCENT',
);
}
// v8BreakIterator also deviates from the spec around hyphens, commas and
// full stops.
final RegExp hyphenRegex = RegExp(r'\((HY|IS)\)(.*?) ÷ \[999\.0\] (DIGIT|NUMBER|SECTION|THAI|<reserved-50005>)');
if (replacement.contains(hyphenRegex)) {
replacement = replacement
.replaceAll(' ÷ 0030', ' × 0030') // DIGIT ZERO (NU)
.replaceAll(' ÷ 0023', ' × 0023') // NUMBER SIGN (AL)
.replaceAll(' ÷ 00A7', ' × 00A7') // SECTION SIGN (AI_AL)
.replaceAll(' ÷ 0E01', ' × 0E01') // THAI CHARACTER KO KAI (SA_AL)
.replaceAll(' ÷ 50005', ' × 50005') // <reserved-50005> (XX_AL)
.replaceAllMapped(
hyphenRegex,
(Match m) => '(${m.group(1)})${m.group(2)} × [999.0] ${m.group(3)}',
);
}
}
return replacement;
}
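// As an illustration of the SP special case above (the fragment is
// hand-written, not an actual line from the data set), a line containing:
//
//   ... 0020 × 2014 ... SPACE (SP) × [8.1] ...
//
// has the '×' following 0020 in the sequence and the '× [8.1]' following
// 'SPACE (SP)' in the explanation both rewritten to '÷', i.e. a break is
// allowed after the space.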
final RegExp spaceRegex = RegExp(r'\s+');
final RegExp signRegex = RegExp(r'([×÷])\s+\[(\d+\.\d+)\]\s*');
final RegExp charRegex = RegExp(
r'([A-Z0-9-]+(?:\s+[A-Z0-9-]+)*)\s+\(([A-Z0-9_]+)\)\s*',
caseSensitive: false,
);
final RegExp charWithBracketsRegex = RegExp(
r'(\<[A-Z0-9()-]+(?:\s+[A-Z0-9()-]+)*\>)\s+\(([A-Z0-9_]+)\)\s*',
caseSensitive: false,
);
TestCase _parse(String line) {
final int hashIndex = line.indexOf('#');
final List<String> sequence =
line.substring(0, hashIndex).trim().split(spaceRegex);
final String explanation = line.substring(hashIndex + 1).trim();
final List<Sign> signs = <Sign>[];
final Match signMatch = signRegex.matchAsPrefix(explanation)!;
signs.add(Sign._(code: signMatch.group(1)!, rule: signMatch.group(2)!));
final List<Char> chars = <Char>[];
int i = signMatch.group(0)!.length;
while (i < explanation.length) {
final Match charMatch = explanation[i] == '<'
? charWithBracketsRegex.matchAsPrefix(explanation, i)!
: charRegex.matchAsPrefix(explanation, i)!;
final int charCode = int.parse(sequence[2 * chars.length + 1], radix: 16);
chars.add(Char._(
code: charCode,
name: charMatch.group(1)!,
property: charMatch.group(2)!,
));
i += charMatch.group(0)!.length;
final Match signMatch = signRegex.matchAsPrefix(explanation, i)!;
signs.add(Sign._(code: signMatch.group(1)!, rule: signMatch.group(2)!));
i += signMatch.group(0)!.length;
}
return TestCase._(signs: signs, chars: chars, raw: line);
}
/// Represents a character in a test case.
///
/// The character has a code, name and a property that determines how it behaves
/// with regards to line breaks.
class Char {
Char._({required this.code, required this.name, required this.property});
final int code;
final String name;
final String property;
/// Whether this character is a code point that gets encoded as a UTF-16
/// surrogate pair.
bool get isSurrogatePair => code > 0xFFFF;
}
/// Represents a sign between two characters in a test case.
///
/// The sign could either be "×" to indicate no line break, or "÷" to indicate
/// the existence of a line break opportunity.
class Sign {
Sign._({required this.code, required this.rule});
final String code;
final String rule;
bool get isBreakOpportunity => code == '÷';
}
/// Represents an entire test case.
///
/// A test case is a sequence of characters combined with signs between them.
/// The signs indicate where line break opportunities exist.
class TestCase {
TestCase._({required this.signs, required this.chars, required this.raw});
final List<Sign> signs;
final List<Char> chars;
final String raw;
Iterable<int> get charCodes => chars.map((Char char) => char.code);
/// Returns the text that this test case is covering.
String toText() {
return String.fromCharCodes(charCodes);
}
@override
String toString() {
return raw;
}
}
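// A minimal usage sketch (illustrative; `rawTestData` stands in for the data
// file contents loaded elsewhere in the test suite):
//
//   final List<TestCase> cases = parseRawTestData(rawTestData, isV8: false);
//   for (final TestCase testCase in cases) {
//     final String text = testCase.toText();
//     // signs[i] is the sign before chars[i]; the final sign applies to the
//     // end of the text.
//   }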
| engine/lib/web_ui/test/html/text/line_breaker_test_helper.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/text/line_breaker_test_helper.dart",
"repo_id": "engine",
"token_count": 3270
} | 318 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
setUp(() async {
await bootstrapAndRunApp(withImplicitView: true);
});
test('collects frame timings', () async {
final EnginePlatformDispatcher dispatcher = ui.PlatformDispatcher.instance as EnginePlatformDispatcher;
List<ui.FrameTiming>? timings;
dispatcher.onReportTimings = (List<ui.FrameTiming> data) {
timings = data;
};
Completer<void> frameDone = Completer<void>();
dispatcher.onDrawFrame = () {
final ui.SceneBuilder sceneBuilder = ui.SceneBuilder();
sceneBuilder
..pushOffset(0, 0)
..pop();
dispatcher.render(sceneBuilder.build()).then((_) {
frameDone.complete();
});
};
// Frame 1.
dispatcher.scheduleFrame();
await frameDone.future;
expect(timings, isNull, reason: "100 ms hasn't passed yet");
await Future<void>.delayed(const Duration(milliseconds: 150));
// Frame 2.
frameDone = Completer<void>();
dispatcher.scheduleFrame();
await frameDone.future;
expect(timings, hasLength(2), reason: '100 ms passed. 2 frames pumped.');
for (final ui.FrameTiming timing in timings!) {
expect(timing.vsyncOverhead, greaterThanOrEqualTo(Duration.zero));
expect(timing.buildDuration, greaterThanOrEqualTo(Duration.zero));
expect(timing.rasterDuration, greaterThanOrEqualTo(Duration.zero));
expect(timing.totalSpan, greaterThanOrEqualTo(Duration.zero));
expect(timing.layerCacheCount, equals(0));
expect(timing.layerCacheBytes, equals(0));
expect(timing.pictureCacheCount, equals(0));
expect(timing.pictureCacheBytes, equals(0));
}
});
}
| engine/lib/web_ui/test/ui/frame_timings_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/frame_timings_test.dart",
"repo_id": "engine",
"token_count": 763
} | 319 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' as ui;
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests(
emulateTesterEnvironment: false,
setUpTestViewDimensions: false,
);
test('blanks are equal to each other', () {
final ui.StrutStyle a = ui.StrutStyle();
final ui.StrutStyle b = ui.StrutStyle();
expect(a, b);
expect(a.hashCode, b.hashCode);
});
test('each property individually equal', () {
for (final String property in _populatorsA.keys) {
final _StrutStylePropertyPopulator populator = _populatorsA[property]!;
final _TestStrutStyleBuilder aBuilder = _TestStrutStyleBuilder();
populator(aBuilder);
final ui.StrutStyle a = aBuilder.build();
final _TestStrutStyleBuilder bBuilder = _TestStrutStyleBuilder();
populator(bBuilder);
final ui.StrutStyle b = bBuilder.build();
expect(reason: '$property property is equal', a, b);
expect(reason: '$property hashCode is equal', a.hashCode, b.hashCode);
}
});
test('each property individually not equal', () {
for (final String property in _populatorsA.keys) {
final _StrutStylePropertyPopulator populatorA = _populatorsA[property]!;
final _TestStrutStyleBuilder aBuilder = _TestStrutStyleBuilder();
populatorA(aBuilder);
final ui.StrutStyle a = aBuilder.build();
final _StrutStylePropertyPopulator populatorB = _populatorsB[property]!;
final _TestStrutStyleBuilder bBuilder = _TestStrutStyleBuilder();
populatorB(bBuilder);
final ui.StrutStyle b = bBuilder.build();
expect(reason: '$property property is not equal', a, isNot(b));
expect(reason: '$property hashCode is not equal', a.hashCode, isNot(b.hashCode));
}
});
test('all properties altogether equal', () {
final _TestStrutStyleBuilder aBuilder = _TestStrutStyleBuilder();
final _TestStrutStyleBuilder bBuilder = _TestStrutStyleBuilder();
for (final String property in _populatorsA.keys) {
final _StrutStylePropertyPopulator populator = _populatorsA[property]!;
populator(aBuilder);
populator(bBuilder);
}
final ui.StrutStyle a = aBuilder.build();
final ui.StrutStyle b = bBuilder.build();
expect(a, b);
expect(a.hashCode, b.hashCode);
});
test('all properties altogether not equal', () {
final _TestStrutStyleBuilder aBuilder = _TestStrutStyleBuilder();
final _TestStrutStyleBuilder bBuilder = _TestStrutStyleBuilder();
for (final String property in _populatorsA.keys) {
final _StrutStylePropertyPopulator populatorA = _populatorsA[property]!;
populatorA(aBuilder);
final _StrutStylePropertyPopulator populatorB = _populatorsB[property]!;
populatorB(bBuilder);
}
final ui.StrutStyle a = aBuilder.build();
final ui.StrutStyle b = bBuilder.build();
expect(a, isNot(b));
expect(a.hashCode, isNot(b.hashCode));
});
}
typedef _StrutStylePropertyPopulator = void Function(_TestStrutStyleBuilder builder);
final Map<String, _StrutStylePropertyPopulator> _populatorsA = <String, _StrutStylePropertyPopulator>{
'fontFamily': (_TestStrutStyleBuilder builder) { builder.fontFamily = 'Arial'; },
// Intentionally do not use const List to make sure Object.hashAll is used to compute hashCode
'fontFamilyFallback': (_TestStrutStyleBuilder builder) { builder.fontFamilyFallback = <String>['Roboto']; },
'fontSize': (_TestStrutStyleBuilder builder) { builder.fontSize = 12; },
'height': (_TestStrutStyleBuilder builder) { builder.height = 13; },
'leading': (_TestStrutStyleBuilder builder) { builder.leading = 0.1; },
'fontWeight': (_TestStrutStyleBuilder builder) { builder.fontWeight = ui.FontWeight.w400; },
'fontStyle': (_TestStrutStyleBuilder builder) { builder.fontStyle = ui.FontStyle.normal; },
'forceStrutHeight': (_TestStrutStyleBuilder builder) { builder.forceStrutHeight = false; },
'leadingDistribution': (_TestStrutStyleBuilder builder) { builder.leadingDistribution = ui.TextLeadingDistribution.proportional; },
};
final Map<String, _StrutStylePropertyPopulator> _populatorsB = <String, _StrutStylePropertyPopulator>{
'fontFamily': (_TestStrutStyleBuilder builder) { builder.fontFamily = 'Noto'; },
// Intentionally do not use const List to make sure Object.hashAll is used to compute hashCode
'fontFamilyFallback': (_TestStrutStyleBuilder builder) { builder.fontFamilyFallback = <String>['Verdana']; },
'fontSize': (_TestStrutStyleBuilder builder) { builder.fontSize = 12.1; },
'height': (_TestStrutStyleBuilder builder) { builder.height = 13.1; },
'leading': (_TestStrutStyleBuilder builder) { builder.leading = 0.2; },
'fontWeight': (_TestStrutStyleBuilder builder) { builder.fontWeight = ui.FontWeight.w600; },
'fontStyle': (_TestStrutStyleBuilder builder) { builder.fontStyle = ui.FontStyle.italic; },
'forceStrutHeight': (_TestStrutStyleBuilder builder) { builder.forceStrutHeight = true; },
'leadingDistribution': (_TestStrutStyleBuilder builder) { builder.leadingDistribution = ui.TextLeadingDistribution.even; },
};
class _TestStrutStyleBuilder {
String? fontFamily;
List<String>? fontFamilyFallback;
double? fontSize;
double? height;
double? leading;
ui.FontWeight? fontWeight;
ui.FontStyle? fontStyle;
bool? forceStrutHeight;
ui.TextLeadingDistribution? leadingDistribution;
ui.StrutStyle build() {
return ui.StrutStyle(
fontFamily: fontFamily,
fontFamilyFallback: fontFamilyFallback,
fontSize: fontSize,
height: height,
leading: leading,
fontWeight: fontWeight,
fontStyle: fontStyle,
forceStrutHeight: forceStrutHeight,
leadingDistribution: leadingDistribution,
);
}
}
| engine/lib/web_ui/test/ui/strut_style_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/strut_style_test.dart",
"repo_id": "engine",
"token_count": 2032
} | 320 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/dart_isolate.h"
#include <cstdlib>
#include "flutter/fml/paths.h"
#include "flutter/runtime/dart_plugin_registrant.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/runtime/dart_vm_lifecycle.h"
#include "flutter/testing/dart_isolate_runner.h"
#include "flutter/testing/fixture_test.h"
#include "flutter/testing/testing.h"
// CREATE_NATIVE_ENTRY is leaky by design
// NOLINTBEGIN(clang-analyzer-core.StackAddressEscape)
namespace flutter {
namespace testing {
const std::string kKernelFileName = "plugin_registrant_kernel_blob.bin";
const std::string kElfFileName = "plugin_registrant_app_elf_snapshot.so";
class DartIsolateTest : public FixtureTest {
public:
DartIsolateTest() : FixtureTest(kKernelFileName, kElfFileName, "") {}
void OverrideDartPluginRegistrant(const std::string& override_value) {
dart_plugin_registrant_library_ = override_value;
dart_plugin_registrant_library_override =
dart_plugin_registrant_library_.c_str();
}
void SetUp() override {
std::string source_path = GetSourcePath();
if (source_path[0] != '/') {
// On windows we need an extra '/' prefix.
source_path = "/" + source_path;
}
std::string registrant_uri = std::string("file://") + source_path +
"flutter/runtime/fixtures/dart_tool/"
"flutter_build/dart_plugin_registrant.dart";
OverrideDartPluginRegistrant(registrant_uri);
}
void TearDown() override {
dart_plugin_registrant_library_override = nullptr;
}
std::string dart_plugin_registrant_library_;
};
TEST_F(DartIsolateTest, DartPluginRegistrantIsPresent) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
std::vector<std::string> messages;
fml::AutoResetWaitableEvent latch;
AddNativeCallback(
"PassMessage",
CREATE_NATIVE_ENTRY(([&latch, &messages](Dart_NativeArguments args) {
auto message = tonic::DartConverter<std::string>::FromDart(
Dart_GetNativeArgument(args, 0));
messages.push_back(message);
latch.Signal();
})));
auto settings = CreateSettingsForFixture();
auto did_throw_exception = false;
settings.unhandled_exception_callback = [&](const std::string& error,
const std::string& stack_trace) {
did_throw_exception = true;
return true;
};
auto vm_ref = DartVMRef::Create(settings);
auto thread = CreateNewThread();
TaskRunners task_runners(GetCurrentTestName(), //
thread, //
thread, //
thread, //
thread //
);
auto kernel_path =
fml::paths::JoinPaths({GetFixturesPath(), kKernelFileName});
auto isolate =
RunDartCodeInIsolate(vm_ref, settings, task_runners,
"mainForPluginRegistrantTest", {}, kernel_path);
ASSERT_TRUE(isolate);
ASSERT_EQ(isolate->get()->GetPhase(), DartIsolate::Phase::Running);
latch.Wait();
ASSERT_EQ(messages.size(), 1u);
ASSERT_EQ(messages[0], "_PluginRegistrant.register() was called");
}
TEST_F(DartIsolateTest, DartPluginRegistrantFromBackgroundIsolate) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
std::vector<std::string> messages;
fml::AutoResetWaitableEvent latch;
AddNativeCallback(
"PassMessage",
CREATE_NATIVE_ENTRY(([&latch, &messages](Dart_NativeArguments args) {
auto message = tonic::DartConverter<std::string>::FromDart(
Dart_GetNativeArgument(args, 0));
messages.push_back(message);
latch.Signal();
})));
auto settings = CreateSettingsForFixture();
auto did_throw_exception = false;
settings.unhandled_exception_callback = [&](const std::string& error,
const std::string& stack_trace) {
did_throw_exception = true;
return true;
};
auto vm_ref = DartVMRef::Create(settings);
auto thread = CreateNewThread();
TaskRunners task_runners(GetCurrentTestName(), //
thread, //
thread, //
thread, //
thread //
);
auto kernel_path =
fml::paths::JoinPaths({GetFixturesPath(), kKernelFileName});
auto isolate = RunDartCodeInIsolate(
vm_ref, settings, task_runners,
"callDartPluginRegistrantFromBackgroundIsolate", {}, kernel_path);
ASSERT_TRUE(isolate);
ASSERT_EQ(isolate->get()->GetPhase(), DartIsolate::Phase::Running);
latch.Wait();
ASSERT_EQ(messages.size(), 1u);
ASSERT_EQ(messages[0],
"_PluginRegistrant.register() was called on background isolate");
}
TEST_F(DartIsolateTest, DartPluginRegistrantNotFromBackgroundIsolate) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
std::vector<std::string> messages;
fml::AutoResetWaitableEvent latch;
AddNativeCallback(
"PassMessage",
CREATE_NATIVE_ENTRY(([&latch, &messages](Dart_NativeArguments args) {
auto message = tonic::DartConverter<std::string>::FromDart(
Dart_GetNativeArgument(args, 0));
messages.push_back(message);
latch.Signal();
})));
auto settings = CreateSettingsForFixture();
auto did_throw_exception = false;
settings.unhandled_exception_callback = [&](const std::string& error,
const std::string& stack_trace) {
did_throw_exception = true;
return true;
};
auto vm_ref = DartVMRef::Create(settings);
auto thread = CreateNewThread();
TaskRunners task_runners(GetCurrentTestName(), //
thread, //
thread, //
thread, //
thread //
);
auto kernel_path =
fml::paths::JoinPaths({GetFixturesPath(), kKernelFileName});
auto isolate = RunDartCodeInIsolate(
vm_ref, settings, task_runners,
"dontCallDartPluginRegistrantFromBackgroundIsolate", {}, kernel_path);
ASSERT_TRUE(isolate);
ASSERT_EQ(isolate->get()->GetPhase(), DartIsolate::Phase::Running);
latch.Wait();
ASSERT_EQ(messages.size(), 1u);
ASSERT_EQ(
messages[0],
"_PluginRegistrant.register() was not called on background isolate");
}
TEST_F(DartIsolateTest, DartPluginRegistrantWhenRegisteringBackgroundIsolate) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
std::vector<std::string> messages;
fml::AutoResetWaitableEvent latch;
AddNativeCallback(
"PassMessage",
CREATE_NATIVE_ENTRY(([&latch, &messages](Dart_NativeArguments args) {
auto message = tonic::DartConverter<std::string>::FromDart(
Dart_GetNativeArgument(args, 0));
messages.push_back(message);
latch.Signal();
})));
auto settings = CreateSettingsForFixture();
auto did_throw_exception = false;
settings.unhandled_exception_callback = [&](const std::string& error,
const std::string& stack_trace) {
did_throw_exception = true;
return true;
};
auto vm_ref = DartVMRef::Create(settings);
auto thread = CreateNewThread();
TaskRunners task_runners(GetCurrentTestName(), //
thread, //
thread, //
thread, //
thread //
);
auto kernel_path =
fml::paths::JoinPaths({GetFixturesPath(), kKernelFileName});
auto isolate = RunDartCodeInIsolate(
vm_ref, settings, task_runners,
"registerBackgroundIsolateCallsDartPluginRegistrant", {}, kernel_path);
ASSERT_TRUE(isolate);
ASSERT_EQ(isolate->get()->GetPhase(), DartIsolate::Phase::Running);
latch.Wait();
ASSERT_EQ(messages.size(), 1u);
ASSERT_EQ(messages[0],
"_PluginRegistrant.register() was called on background isolate");
}
} // namespace testing
} // namespace flutter
// NOLINTEND(clang-analyzer-core.StackAddressEscape)
| engine/runtime/dart_plugin_registrant_unittests.cc/0 | {
"file_path": "engine/runtime/dart_plugin_registrant_unittests.cc",
"repo_id": "engine",
"token_count": 3676
} | 321 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/dart_vm.h"
#include "flutter/runtime/dart_vm_lifecycle.h"
#include "flutter/testing/fixture_test.h"
#include "gtest/gtest.h"
namespace flutter {
namespace testing {
using DartVMTest = FixtureTest;
TEST_F(DartVMTest, SimpleInitialization) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
auto vm = DartVMRef::Create(CreateSettingsForFixture());
ASSERT_TRUE(vm);
}
TEST_F(DartVMTest, SimpleIsolateNameServer) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
auto vm = DartVMRef::Create(CreateSettingsForFixture());
ASSERT_TRUE(vm);
ASSERT_TRUE(vm.GetVMData());
auto ns = vm->GetIsolateNameServer();
ASSERT_EQ(ns->LookupIsolatePortByName("foobar"), ILLEGAL_PORT);
ASSERT_FALSE(ns->RemoveIsolateNameMapping("foobar"));
ASSERT_TRUE(ns->RegisterIsolatePortWithName(123, "foobar"));
ASSERT_FALSE(ns->RegisterIsolatePortWithName(123, "foobar"));
ASSERT_EQ(ns->LookupIsolatePortByName("foobar"), 123);
ASSERT_TRUE(ns->RemoveIsolateNameMapping("foobar"));
}
TEST_F(DartVMTest, OldGenHeapSize) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
auto settings = CreateSettingsForFixture();
settings.old_gen_heap_size = 1024;
auto vm = DartVMRef::Create(settings);
// There is no way to introspect on the heap size so we just assert the vm was
// created.
ASSERT_TRUE(vm);
}
TEST_F(DartVMTest, DisableTimelineEventHandler) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
fml::tracing::TraceSetTimelineEventHandler(nullptr);
auto settings = CreateSettingsForFixture();
settings.enable_timeline_event_handler = false;
auto vm = DartVMRef::Create(settings);
ASSERT_FALSE(fml::tracing::TraceHasTimelineEventHandler());
}
TEST_F(DartVMTest, TraceGetTimelineMicrosDoesNotGetClockWhenSystraceIsEnabled) {
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
auto settings = CreateSettingsForFixture();
settings.trace_systrace = true;
auto vm = DartVMRef::Create(settings);
ASSERT_EQ(-1, fml::tracing::TraceGetTimelineMicros());
}
} // namespace testing
} // namespace flutter
| engine/runtime/dart_vm_unittests.cc/0 | {
"file_path": "engine/runtime/dart_vm_unittests.cc",
"repo_id": "engine",
"token_count": 784
} | 322 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_RUNTIME_PTRACE_CHECK_H_
#define FLUTTER_RUNTIME_PTRACE_CHECK_H_
#include "flutter/common/settings.h"
#include "flutter/fml/build_config.h"
namespace flutter {
#define TRACING_CHECKS_NECESSARY \
FML_OS_IOS && !TARGET_OS_SIMULATOR && \
(FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG)
enum class TracingResult {
kNotAttempted,
kEnabled,
kNotNecessary = kEnabled,
kDisabled,
};
#if TRACING_CHECKS_NECESSARY
bool EnableTracingIfNecessaryImpl(const Settings& vm_settings);
TracingResult GetTracingResultImpl();
#endif // TRACING_CHECKS_NECESSARY
//------------------------------------------------------------------------------
/// @brief Enables tracing in the process so that JIT mode VMs may be
/// launched. Explicitly enabling tracing is not required on all
/// platforms. On platforms where it is not required, calling this
/// method will return true. If tracing is required but cannot be
/// enabled, it is the responsibility of the caller to display the
/// appropriate error message to the user as subsequent attempts to
/// launch the VM in JIT mode will cause process termination.
///
/// This method may be called multiple times and will return the
/// same result. There are no threading restrictions.
///
/// @param[in] vm_settings The settings used to launch the VM.
///
/// @return If tracing was enabled.
///
inline bool EnableTracingIfNecessary(const Settings& vm_settings) {
#if TRACING_CHECKS_NECESSARY
return EnableTracingIfNecessaryImpl(vm_settings);
#else // TRACING_CHECKS_NECESSARY
return true;
#endif // TRACING_CHECKS_NECESSARY
}
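// A minimal usage sketch for an embedder about to launch a JIT-mode VM
// (illustrative only; the logging and early return are assumptions, not part
// of this header's contract):
//
//   if (!EnableTracingIfNecessary(settings)) {
//     FML_LOG(ERROR) << "Tracing could not be enabled; launching the VM in "
//                       "JIT mode would terminate the process.";
//     return;
//   }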
//------------------------------------------------------------------------------
/// @brief Returns if a tracing check has been performed and its result. To
/// enable tracing, the Settings object used to launch the VM is
/// required. Components may want to display messages based on the
/// result of a previous tracing check without actually having the
/// settings object. This accessor can be used instead.
///
/// @return The tracing result.
///
inline TracingResult GetTracingResult() {
#if TRACING_CHECKS_NECESSARY
return GetTracingResultImpl();
#else // TRACING_CHECKS_NECESSARY
return TracingResult::kNotNecessary;
#endif // TRACING_CHECKS_NECESSARY
}
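// Components that only need to report on an earlier check could, for example
// (illustrative), switch on the cached result:
//
//   switch (GetTracingResult()) {
//     case TracingResult::kDisabled:
//       // Surface a message explaining that JIT execution is unavailable.
//       break;
//     case TracingResult::kEnabled:  // also covers kNotNecessary
//     case TracingResult::kNotAttempted:
//       break;
//   }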
} // namespace flutter
#endif // FLUTTER_RUNTIME_PTRACE_CHECK_H_
| engine/runtime/ptrace_check.h/0 | {
"file_path": "engine/runtime/ptrace_check.h",
"repo_id": "engine",
"token_count": 911
} | 323 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define FML_USED_ON_EMBEDDER
#include "flutter/shell/common/animator.h"
#include <functional>
#include <future>
#include <memory>
#include "flutter/shell/common/shell_test.h"
#include "flutter/shell/common/shell_test_platform_view.h"
#include "flutter/testing/post_task_sync.h"
#include "flutter/testing/testing.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
// CREATE_NATIVE_ENTRY is leaky by design
// NOLINTBEGIN(clang-analyzer-core.StackAddressEscape)
namespace flutter {
namespace testing {
constexpr int64_t kImplicitViewId = 0;
class FakeAnimatorDelegate : public Animator::Delegate {
public:
MOCK_METHOD(void,
OnAnimatorBeginFrame,
(fml::TimePoint frame_target_time, uint64_t frame_number),
(override));
void OnAnimatorNotifyIdle(fml::TimeDelta deadline) override {
notify_idle_called_ = true;
}
MOCK_METHOD(void,
OnAnimatorUpdateLatestFrameTargetTime,
(fml::TimePoint frame_target_time),
(override));
MOCK_METHOD(void,
OnAnimatorDraw,
(std::shared_ptr<FramePipeline> pipeline),
(override));
void OnAnimatorDrawLastLayerTrees(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder) override {}
bool notify_idle_called_ = false;
};
TEST_F(ShellTest, VSyncTargetTime) {
// Add native callbacks to listen for window.onBeginFrame
int64_t target_time;
fml::AutoResetWaitableEvent on_target_time_latch;
auto nativeOnBeginFrame = [&on_target_time_latch,
&target_time](Dart_NativeArguments args) {
Dart_Handle exception = nullptr;
target_time =
tonic::DartConverter<int64_t>::FromArguments(args, 0, exception);
on_target_time_latch.Signal();
};
AddNativeCallback("NativeOnBeginFrame",
CREATE_NATIVE_ENTRY(nativeOnBeginFrame));
  // Create all the prerequisites for a shell.
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
auto settings = CreateSettingsForFixture();
std::unique_ptr<Shell> shell;
TaskRunners task_runners = GetTaskRunnersForFixture();
// this is not used as we are not using simulated events.
const auto vsync_clock = std::make_shared<ShellTestVsyncClock>();
CreateVsyncWaiter create_vsync_waiter = [&]() {
return static_cast<std::unique_ptr<VsyncWaiter>>(
std::make_unique<ConstantFiringVsyncWaiter>(task_runners));
};
// create a shell with a constant firing vsync waiter.
auto platform_task = std::async(std::launch::async, [&]() {
fml::MessageLoop::EnsureInitializedForCurrentThread();
shell = Shell::Create(
flutter::PlatformData(), task_runners, settings,
[vsync_clock, &create_vsync_waiter](Shell& shell) {
return ShellTestPlatformView::Create(
shell, shell.GetTaskRunners(), vsync_clock, create_vsync_waiter,
ShellTestPlatformView::BackendType::kDefaultBackend, nullptr,
shell.GetIsGpuDisabledSyncSwitch());
},
[](Shell& shell) { return std::make_unique<Rasterizer>(shell); });
ASSERT_TRUE(DartVMRef::IsInstanceRunning());
auto configuration = RunConfiguration::InferFromSettings(settings);
ASSERT_TRUE(configuration.IsValid());
configuration.SetEntrypoint("onBeginFrameMain");
RunEngine(shell.get(), std::move(configuration));
});
platform_task.wait();
on_target_time_latch.Wait();
const auto vsync_waiter_target_time =
ConstantFiringVsyncWaiter::kFrameTargetTime;
ASSERT_EQ(vsync_waiter_target_time.ToEpochDelta().ToMicroseconds(),
target_time);
// validate that the latest target time has also been updated.
ASSERT_EQ(GetLatestFrameTargetTime(shell.get()), vsync_waiter_target_time);
// teardown.
DestroyShell(std::move(shell), task_runners);
ASSERT_FALSE(DartVMRef::IsInstanceRunning());
}
TEST_F(ShellTest, AnimatorDoesNotNotifyIdleBeforeRender) {
FakeAnimatorDelegate delegate;
TaskRunners task_runners = {
"test",
CreateNewThread(), // platform
CreateNewThread(), // raster
CreateNewThread(), // ui
CreateNewThread() // io
};
auto clock = std::make_shared<ShellTestVsyncClock>();
fml::AutoResetWaitableEvent latch;
std::shared_ptr<Animator> animator;
auto flush_vsync_task = [&] {
fml::AutoResetWaitableEvent ui_latch;
task_runners.GetUITaskRunner()->PostTask([&] { ui_latch.Signal(); });
do {
clock->SimulateVSync();
} while (ui_latch.WaitWithTimeout(fml::TimeDelta::FromMilliseconds(1)));
latch.Signal();
};
// Create the animator on the UI task runner.
task_runners.GetUITaskRunner()->PostTask([&] {
auto vsync_waiter = static_cast<std::unique_ptr<VsyncWaiter>>(
std::make_unique<ShellTestVsyncWaiter>(task_runners, clock));
animator = std::make_unique<Animator>(delegate, task_runners,
std::move(vsync_waiter));
latch.Signal();
});
latch.Wait();
// Validate it has not notified idle and start it. This will request a frame.
task_runners.GetUITaskRunner()->PostTask([&] {
ASSERT_FALSE(delegate.notify_idle_called_);
// Immediately request a frame saying it can reuse the last layer tree to
// avoid more calls to BeginFrame by the animator.
animator->RequestFrame(false);
task_runners.GetPlatformTaskRunner()->PostTask(flush_vsync_task);
});
latch.Wait();
ASSERT_FALSE(delegate.notify_idle_called_);
fml::AutoResetWaitableEvent render_latch;
// Validate it has not notified idle and try to render.
task_runners.GetUITaskRunner()->PostDelayedTask(
[&] {
ASSERT_FALSE(delegate.notify_idle_called_);
EXPECT_CALL(delegate, OnAnimatorBeginFrame).WillOnce([&] {
auto layer_tree = std::make_unique<LayerTree>(
LayerTree::Config(), SkISize::Make(600, 800));
animator->Render(kImplicitViewId, std::move(layer_tree), 1.0);
render_latch.Signal();
});
// Request a frame that builds a layer tree and renders a frame.
// When the frame is rendered, render_latch will be signaled.
animator->RequestFrame(true);
task_runners.GetPlatformTaskRunner()->PostTask(flush_vsync_task);
},
// See kNotifyIdleTaskWaitTime in animator.cc.
fml::TimeDelta::FromMilliseconds(60));
latch.Wait();
render_latch.Wait();
// A frame has been rendered, and the next frame request will notify idle.
// But at the moment there isn't another frame request, therefore it still
// hasn't notified idle.
task_runners.GetUITaskRunner()->PostTask([&] {
ASSERT_FALSE(delegate.notify_idle_called_);
    // False to avoid getting calls to BeginFrame that will request more frames
// before we are ready.
animator->RequestFrame(false);
task_runners.GetPlatformTaskRunner()->PostTask(flush_vsync_task);
});
latch.Wait();
// Now it should notify idle. Make sure it is destroyed on the UI thread.
ASSERT_TRUE(delegate.notify_idle_called_);
task_runners.GetPlatformTaskRunner()->PostTask(flush_vsync_task);
latch.Wait();
task_runners.GetUITaskRunner()->PostTask([&] {
animator.reset();
latch.Signal();
});
latch.Wait();
}
TEST_F(ShellTest, AnimatorDoesNotNotifyDelegateIfPipelineIsNotEmpty) {
FakeAnimatorDelegate delegate;
TaskRunners task_runners = {
"test",
CreateNewThread(), // platform
CreateNewThread(), // raster
CreateNewThread(), // ui
CreateNewThread() // io
};
auto clock = std::make_shared<ShellTestVsyncClock>();
std::shared_ptr<Animator> animator;
auto flush_vsync_task = [&] {
fml::AutoResetWaitableEvent ui_latch;
task_runners.GetUITaskRunner()->PostTask([&] { ui_latch.Signal(); });
do {
clock->SimulateVSync();
} while (ui_latch.WaitWithTimeout(fml::TimeDelta::FromMilliseconds(1)));
};
// Create the animator on the UI task runner.
PostTaskSync(task_runners.GetUITaskRunner(), [&] {
auto vsync_waiter = static_cast<std::unique_ptr<VsyncWaiter>>(
std::make_unique<ShellTestVsyncWaiter>(task_runners, clock));
animator = std::make_unique<Animator>(delegate, task_runners,
std::move(vsync_waiter));
});
fml::AutoResetWaitableEvent begin_frame_latch;
// It must always be called when the method 'Animator::Render' is called,
// regardless of whether the pipeline is empty or not.
EXPECT_CALL(delegate, OnAnimatorUpdateLatestFrameTargetTime).Times(2);
// It will only be called once even though we call the method
  // 'Animator::Render' twice, because it will only be called when the pipeline
// is empty.
EXPECT_CALL(delegate, OnAnimatorDraw).Times(1);
for (int i = 0; i < 2; i++) {
task_runners.GetUITaskRunner()->PostTask([&] {
EXPECT_CALL(delegate, OnAnimatorBeginFrame).WillOnce([&] {
auto layer_tree = std::make_unique<LayerTree>(LayerTree::Config(),
SkISize::Make(600, 800));
animator->Render(kImplicitViewId, std::move(layer_tree), 1.0);
begin_frame_latch.Signal();
});
animator->RequestFrame();
task_runners.GetPlatformTaskRunner()->PostTask(flush_vsync_task);
});
begin_frame_latch.Wait();
}
PostTaskSync(task_runners.GetUITaskRunner(), [&] { animator.reset(); });
}
} // namespace testing
} // namespace flutter
// NOLINTEND(clang-analyzer-core.StackAddressEscape)
| engine/shell/common/animator_unittests.cc/0 | {
"file_path": "engine/shell/common/animator_unittests.cc",
"repo_id": "engine",
"token_count": 3690
} | 324 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_COMMON_ENGINE_H_
#define FLUTTER_SHELL_COMMON_ENGINE_H_
#include <memory>
#include <string>
#include "flutter/assets/asset_manager.h"
#include "flutter/common/task_runners.h"
#include "flutter/fml/macros.h"
#include "flutter/fml/mapping.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/lib/ui/painting/image_decoder.h"
#include "flutter/lib/ui/painting/image_generator_registry.h"
#include "flutter/lib/ui/semantics/custom_accessibility_action.h"
#include "flutter/lib/ui/semantics/semantics_node.h"
#include "flutter/lib/ui/snapshot_delegate.h"
#include "flutter/lib/ui/text/font_collection.h"
#include "flutter/lib/ui/volatile_path_tracker.h"
#include "flutter/lib/ui/window/platform_message.h"
#include "flutter/lib/ui/window/viewport_metrics.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/runtime/runtime_controller.h"
#include "flutter/runtime/runtime_delegate.h"
#include "flutter/shell/common/animator.h"
#include "flutter/shell/common/display_manager.h"
#include "flutter/shell/common/platform_view.h"
#include "flutter/shell/common/pointer_data_dispatcher.h"
#include "flutter/shell/common/run_configuration.h"
#include "flutter/shell/common/shell_io_manager.h"
namespace flutter {
//------------------------------------------------------------------------------
/// The engine is a component owned by the shell that resides on the UI task
/// runner and is responsible for managing the needs of the root isolate and its
/// runtime. The engine can only be created, accessed and collected on the UI
/// task runner. Each shell owns exactly one instance of the engine.
///
/// The root isolate of a Flutter application gets "window" bindings. Using these
/// bindings, the application can schedule frames, post layer-trees for
/// rendering, ask to decompress images and upload them to the GPU, etc..
/// Non-root isolates of the VM do not get any of these capabilities and are run
/// in a VM managed thread pool (so if they did have "window", the threading
/// guarantees needed for engine operation would be violated).
///
/// The engine is responsible for the entire life-cycle of the root isolate.
/// When the engine is collected, its owner assumes that the root isolate has
/// been shutdown and appropriate resources collected. While each engine
/// instance can only manage a single instance of a root isolate, it may restart
/// that isolate on request. This is how the cold-restart development scenario
/// is supported.
///
/// When the engine instance is initially created, the root isolate is created
/// but it is not in the |DartIsolate::Phase::Running| phase yet. It only moves
/// into that phase when a successful call to `Engine::Run` is made.
///
/// @see `Shell`
///
/// @note       The name of this class is perhaps a bit unfortunate and has
/// sometimes been the cause of confusion. For a class named "Engine"
/// in the Flutter "Engine" repository, its responsibilities are
/// decidedly unremarkable. But, it does happen to be the primary
/// entry-point used by components higher up in the Flutter tech stack
/// (usually in Dart code) to peer into the lower level functionality.
/// Besides, the authors haven't been able to come up with a more apt
/// name and it does happen to be one of the older classes in the
/// repository.
///
class Engine final : public RuntimeDelegate, PointerDataDispatcher::Delegate {
public:
//----------------------------------------------------------------------------
/// @brief Indicates the result of the call to `Engine::Run`.
///
enum class RunStatus {
// NOLINTBEGIN(readability-identifier-naming)
//--------------------------------------------------------------------------
/// The call to |Engine::Run| was successful and the root isolate is in the
/// `DartIsolate::Phase::Running` phase with its entry-point invocation
/// already pending in the task queue.
///
Success,
//--------------------------------------------------------------------------
/// The engine can only manage a single instance of a root isolate. If a
/// previous call to run the root isolate was successful, subsequent calls
/// to run the isolate (even if the new run configuration is different) will
/// be rejected.
///
/// It is up to the caller to decide to re-purpose the running isolate,
/// terminate it, or use another shell to host the new isolate. This is
/// mostly used by embedders which have a fire-and-forget strategy to root
/// isolate launch. For example, the application may try to "launch" an
    /// isolate when the embedder launches or resumes from a paused state. That
/// the isolate is running is not necessarily a failure condition for them.
/// But from the engine's perspective, the run configuration was rejected.
///
FailureAlreadyRunning,
//--------------------------------------------------------------------------
/// Used to indicate to the embedder that a root isolate was not already
/// running but the run configuration was not valid and root isolate could
/// not be moved into the `DartIsolate::Phase::Running` phase.
///
/// The caller must attempt the run call again with a valid configuration.
/// The set of all failure modes is massive and can originate from a variety
/// of sub-components. The engine will attempt to log the same when
/// possible. With the aid of logs, the common causes of failure are:
///
/// * AOT assets were given to JIT/DBC mode VM's and vice-versa.
/// * The assets could not be found in the asset manager. Callers must make
/// sure their run configuration asset managers have been correctly set
/// up.
/// * The assets themselves were corrupt or invalid. Callers must make sure
/// their asset delivery mechanisms are sound.
/// * The application entry-point or the root library of the entry-point
/// specified in the run configuration was invalid. Callers must make sure
/// that the entry-point is present in the application. If the name of the
/// entrypoint is not "main" in the root library, callers must also ensure
/// that the snapshotting process has not tree-shaken away this
/// entrypoint. This requires the decoration of the entrypoint with the
/// `@pragma('vm:entry-point')` directive. This problem will manifest in
/// AOT mode operation of the Dart VM.
///
Failure,
// NOLINTEND(readability-identifier-naming)
};
//----------------------------------------------------------------------------
/// @brief While the engine operates entirely on the UI task runner, it
/// needs the capabilities of the other components to fulfill the
/// requirements of the root isolate. The shell is the only class
/// that implements this interface as no other component has
/// access to all components in a thread safe manner. The engine
/// delegates these tasks to the shell via this interface.
///
class Delegate {
public:
//--------------------------------------------------------------------------
/// @brief When the accessibility tree has been updated by the Flutter
/// application, this new information needs to be conveyed to
/// the underlying platform. The engine delegates this task to
/// the shell via this call. The engine cannot access the
/// underlying platform directly because of threading
/// considerations. Most platform specific APIs to convey
/// accessibility information are only safe to access on the
/// platform task runner while the engine is running on the UI
/// task runner.
///
/// @see `SemanticsNode`, `SemanticsNodeUpdates`,
/// `CustomAccessibilityActionUpdates`,
/// `PlatformView::UpdateSemantics`
///
/// @param[in] updates A map with the stable semantics node identifier as
/// key and the node properties as the value.
/// @param[in] actions A map with the stable semantics node identifier as
/// key and the custom node action as the value.
///
virtual void OnEngineUpdateSemantics(
SemanticsNodeUpdates updates,
CustomAccessibilityActionUpdates actions) = 0;
//--------------------------------------------------------------------------
/// @brief When the Flutter application has a message to send to the
/// underlying platform, the message needs to be forwarded to
/// the platform on the appropriate thread (via the platform
/// task runner). The engine delegates this task to the shell
/// via this method.
///
/// @see `PlatformView::HandlePlatformMessage`
///
/// @param[in] message The message from the Flutter application to send to
/// the underlying platform.
///
virtual void OnEngineHandlePlatformMessage(
std::unique_ptr<PlatformMessage> message) = 0;
//--------------------------------------------------------------------------
/// @brief Notifies the delegate that the root isolate of the
/// application is about to be discarded and a new isolate with
/// the same runtime started in its place. This should only
/// happen in the Flutter "debug" runtime mode in the
/// cold-restart scenario. The embedder may need to reset native
    ///             resources in response to the restart.
///
/// @see `PlatformView::OnPreEngineRestart`
///
virtual void OnPreEngineRestart() = 0;
//--------------------------------------------------------------------------
/// @brief Notifies the shell that the root isolate is created.
    ///             Currently, this information is used to add to the service
/// protocol list of available root isolates running in the VM
/// and their names so that the appropriate isolate can be
/// selected in the tools for debugging and instrumentation.
///
virtual void OnRootIsolateCreated() = 0;
//--------------------------------------------------------------------------
/// @brief Notifies the shell of the name of the root isolate and its
/// port when that isolate is launched, restarted (in the
/// cold-restart scenario) or the application itself updates the
/// name of the root isolate (via
/// `PlatformDispatcher.setIsolateDebugName` in
/// `platform_dispatcher.dart`). The name of the isolate is
/// meaningless to the engine but is used in instrumentation and
    ///             tooling. Currently, this information is used to update the
/// service protocol list of available root isolates running in
/// the VM and their names so that the appropriate isolate can
/// be selected in the tools for debugging and instrumentation.
///
/// @param[in] isolate_name The isolate name
/// @param[in] isolate_port The isolate port
///
virtual void UpdateIsolateDescription(const std::string isolate_name,
int64_t isolate_port) = 0;
//--------------------------------------------------------------------------
/// @brief Notifies the shell that the application has an opinion about
/// whether its frame timings need to be reported backed to it.
/// Due to the asynchronous nature of rendering in Flutter, it
/// is not possible for the application to determine the total
/// time it took to render a specific frame. While the
/// layer-tree is constructed on the UI thread, it needs to be
/// rendering on the raster thread. Dart code cannot execute on
/// this thread. So any instrumentation about the frame times
/// gathered on this thread needs to be aggregated and sent back
/// to the UI thread for processing in Dart.
///
/// When the application indicates that frame times need to be
/// reported, it collects this information till a specified
/// number of data points are gathered. Then this information is
/// sent back to Dart code via `Engine::ReportTimings`.
///
/// This option is engine counterpart of the
/// `Window._setNeedsReportTimings` in `window.dart`.
///
/// @param[in] needs_reporting If reporting information should be
    ///                                  collected and sent back to Dart.
///
virtual void SetNeedsReportTimings(bool needs_reporting) = 0;
//--------------------------------------------------------------------------
/// @brief Directly invokes platform-specific APIs to compute the
/// locale the platform would have natively resolved to.
///
/// @param[in] supported_locale_data The vector of strings that represents
/// the locales supported by the app.
/// Each locale consists of three
/// strings: languageCode, countryCode,
/// and scriptCode in that order.
///
/// @return A vector of 3 strings languageCode, countryCode, and
/// scriptCode that represents the locale selected by the
/// platform. Empty strings mean the value was unassigned. Empty
/// vector represents a null locale.
///
virtual std::unique_ptr<std::vector<std::string>>
ComputePlatformResolvedLocale(
const std::vector<std::string>& supported_locale_data) = 0;
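    //--------------------------------------------------------------------------
    /// Example (illustrative sketch only): the flat `supported_locale_data`
    /// vector packs three strings per locale. An app that supports en_US and
    /// fr (with no country or script code) might be described as follows.
    ///
    /// @code
    ///   std::vector<std::string> supported_locale_data = {
    ///       "en", "US", "",  // languageCode, countryCode, scriptCode
    ///       "fr", "",   ""};
    ///   auto resolved =
    ///       delegate.ComputePlatformResolvedLocale(supported_locale_data);
    ///   // If non-null, `resolved` holds exactly three strings in the same
    ///   // languageCode/countryCode/scriptCode order.
    /// @endcode
    ///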
//--------------------------------------------------------------------------
/// @brief Invoked when the Dart VM requests that a deferred library
/// be loaded. Notifies the engine that the deferred library
/// identified by the specified loading unit id should be
/// downloaded and loaded into the Dart VM via
/// `LoadDartDeferredLibrary`
///
/// Upon encountering errors or otherwise failing to load a
/// loading unit with the specified id, the failure should be
/// directly reported to dart by calling
/// `LoadDartDeferredLibraryFailure` to ensure the waiting dart
/// future completes with an error.
///
/// @param[in] loading_unit_id The unique id of the deferred library's
/// loading unit. This id is to be passed
/// back into LoadDartDeferredLibrary
/// in order to identify which deferred
/// library to load.
///
virtual void RequestDartDeferredLibrary(intptr_t loading_unit_id) = 0;
//--------------------------------------------------------------------------
/// @brief Returns the current fml::TimePoint.
/// This method is primarily provided to allow tests to control
/// Any methods that rely on advancing the clock.
virtual fml::TimePoint GetCurrentTimePoint() = 0;
//----------------------------------------------------------------------------
    /// @brief Returns the delegate object that handles PlatformMessages from
/// Flutter to the host platform (and its responses).
virtual const std::shared_ptr<PlatformMessageHandler>&
GetPlatformMessageHandler() const = 0;
//--------------------------------------------------------------------------
/// @brief Invoked when a listener is registered on a platform channel.
///
/// @param[in] name The name of the platform channel to which a
/// listener has been registered or cleared.
///
/// @param[in] listening Whether the listener has been set (true) or
/// cleared (false).
///
virtual void OnEngineChannelUpdate(std::string name, bool listening) = 0;
//--------------------------------------------------------------------------
/// @brief Synchronously invokes platform-specific APIs to apply the
/// system text scaling on the given unscaled font size.
///
/// Platforms that support this feature (currently it's only
/// implemented for Android SDK level 34+) will send a valid
/// configuration_id to potential callers, before this method
/// can be called.
///
/// @param[in] unscaled_font_size The unscaled font size specified by the
/// app developer. The value is in logical
/// pixels, and is guaranteed to be finite
/// and non-negative.
/// @param[in] configuration_id The unique id of the configuration to
/// use for computing the scaled font size.
///
/// @return The scaled font size in logical pixels, or -1 when the given
/// configuration_id did not match a valid configuration.
///
virtual double GetScaledFontSize(double unscaled_font_size,
int configuration_id) const = 0;
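    //--------------------------------------------------------------------------
    /// Example (illustrative sketch only): applying the platform text scaling
    /// to a developer-specified font size. The `configuration_id` is assumed
    /// to have been supplied to the caller by the platform beforehand.
    ///
    /// @code
    ///   double scaled =
    ///       delegate.GetScaledFontSize(/*unscaled_font_size=*/14.0,
    ///                                  configuration_id);
    ///   if (scaled < 0) {
    ///     // The configuration id did not match a valid configuration; fall
    ///     // back to the unscaled value.
    ///     scaled = 14.0;
    ///   }
    /// @endcode
    ///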
};
//----------------------------------------------------------------------------
/// @brief Creates an instance of the engine with a supplied
/// `RuntimeController`. Use the other constructor except for
/// tests.
///
Engine(Delegate& delegate,
const PointerDataDispatcherMaker& dispatcher_maker,
const std::shared_ptr<fml::ConcurrentTaskRunner>&
image_decoder_task_runner,
const TaskRunners& task_runners,
const Settings& settings,
std::unique_ptr<Animator> animator,
const fml::WeakPtr<IOManager>& io_manager,
const std::shared_ptr<FontCollection>& font_collection,
std::unique_ptr<RuntimeController> runtime_controller,
const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch);
//----------------------------------------------------------------------------
/// @brief Creates an instance of the engine. This is done by the Shell
/// on the UI task runner.
///
/// @param delegate The object used by the engine to perform
/// tasks that require access to components
/// that cannot be safely accessed by the
/// engine. This is the shell.
/// @param dispatcher_maker The callback provided by `PlatformView` for
/// engine to create the pointer data
/// dispatcher. Similar to other engine
/// resources, this dispatcher_maker and its
/// returned dispatcher is only safe to be
/// called from the UI thread.
/// @param vm An instance of the running Dart VM.
/// @param[in] isolate_snapshot The snapshot used to create the root
/// isolate. Even though the isolate is not
/// `DartIsolate::Phase::Running` phase, it is
/// created when the engine is created. This
/// requires access to the isolate snapshot
/// upfront.
// TODO(chinmaygarde): This is probably redundant now that the IO manager is
  // its own object.
/// @param[in] task_runners The task runners used by the shell that
/// hosts this engine.
/// @param[in] settings The settings used to initialize the shell
/// and the engine.
/// @param[in] animator The animator used to schedule frames.
// TODO(chinmaygarde): Move this to `Engine::Delegate`
/// @param[in] snapshot_delegate The delegate used to fulfill requests to
/// snapshot a specified scene. The engine
/// cannot snapshot a scene on the UI thread
/// directly because the scene (described via
/// a `DisplayList`) may reference resources on
/// the GPU and there is no GPU context current
/// on the UI thread. The delegate is a
/// component that has access to all the
/// requisite GPU resources.
/// @param[in] io_manager The IO manager used by this root isolate to
/// schedule tasks that manage resources on the
/// GPU.
///
Engine(Delegate& delegate,
const PointerDataDispatcherMaker& dispatcher_maker,
DartVM& vm,
fml::RefPtr<const DartSnapshot> isolate_snapshot,
const TaskRunners& task_runners,
const PlatformData& platform_data,
const Settings& settings,
std::unique_ptr<Animator> animator,
fml::WeakPtr<IOManager> io_manager,
const fml::RefPtr<SkiaUnrefQueue>& unref_queue,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
std::shared_ptr<VolatilePathTracker> volatile_path_tracker,
const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch,
impeller::RuntimeStageBackend runtime_stage_type =
impeller::RuntimeStageBackend::kSkSL);
//----------------------------------------------------------------------------
  /// @brief      Create an Engine that shares as many resources as
/// possible with the calling Engine such that together
  ///             they occupy less memory and can be created faster.
/// @details This should only be called on running Engines.
/// @return A new Engine with a running isolate.
/// @see Engine::Engine
/// @see DartIsolate::SpawnIsolate
///
std::unique_ptr<Engine> Spawn(
Delegate& delegate,
const PointerDataDispatcherMaker& dispatcher_maker,
const Settings& settings,
std::unique_ptr<Animator> animator,
const std::string& initial_route,
const fml::WeakPtr<IOManager>& io_manager,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch) const;
//----------------------------------------------------------------------------
  /// @brief      Destroys the engine. Called by the shell on the UI task
/// runner. The running root isolate is terminated and will no
/// longer access the task runner after this call returns. This
/// allows the embedder to tear down the thread immediately if
/// needed.
///
~Engine() override;
//----------------------------------------------------------------------------
/// @return The pointer to this instance of the engine. The engine may
/// only be accessed safely on the UI task runner.
///
fml::WeakPtr<Engine> GetWeakPtr() const;
//----------------------------------------------------------------------------
/// @brief Moves the root isolate to the `DartIsolate::Phase::Running`
/// phase on a successful call to this method.
///
/// The isolate itself is created when the engine is created, but
/// it is not yet in the running phase. This is done to amortize
/// initial time taken to launch the root isolate. The isolate
/// snapshots used to run the isolate can be fetched on another
/// thread while the engine itself is launched on the UI task
/// runner.
///
/// Repeated calls to this method after a successful run will be
/// rejected even if the run configuration is valid (with the
/// appropriate error returned).
///
/// @param[in] configuration The configuration used to run the root isolate.
/// The configuration must be valid.
///
/// @return The result of the call to run the root isolate.
///
[[nodiscard]] RunStatus Run(RunConfiguration configuration);
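  //----------------------------------------------------------------------------
  /// Example (illustrative sketch only): a typical call site checks the
  /// returned status before assuming the isolate is running. `configuration`
  /// is assumed to be a valid RunConfiguration assembled by the embedder.
  ///
  /// @code
  ///   Engine::RunStatus status = engine->Run(std::move(configuration));
  ///   if (status != Engine::RunStatus::Success &&
  ///       status != Engine::RunStatus::FailureAlreadyRunning) {
  ///     FML_LOG(ERROR) << "Could not launch the root isolate.";
  ///   }
  /// @endcode
  ///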
//----------------------------------------------------------------------------
/// @brief Tears down an existing root isolate, reuses the components of
/// that isolate and attempts to launch a new isolate using the
  ///             given run configuration. This is only used in the
/// "debug" Flutter runtime mode in the cold-restart scenario.
///
/// @attention This operation must be performed with care as even a
/// non-successful restart will still tear down any existing root
/// isolate. In such cases, the engine and its shell must be
/// discarded.
///
/// @param[in] configuration The configuration used to launch the new
/// isolate.
///
/// @return Whether the restart was successful. If not, the engine and its
/// shell must be discarded.
///
[[nodiscard]] bool Restart(RunConfiguration configuration);
//----------------------------------------------------------------------------
/// @brief Setup default font manager according to specific platform.
///
void SetupDefaultFontManager();
//----------------------------------------------------------------------------
/// @brief Updates the asset manager referenced by the root isolate of a
/// Flutter application. This happens implicitly in the call to
/// `Engine::Run` and `Engine::Restart` as the asset manager is
/// referenced from the run configuration provided to those calls.
/// In addition to the `Engine::Run` and `Engine::Restart`
/// calls, the tooling may need to update the assets available to
/// the application as the user adds them to their project. For
/// example, these assets may be referenced by code that is newly
  ///             patched in after a hot-reload. Neither the shell nor the
  ///             isolate is relaunched in such cases. The tooling usually
/// patches in the new assets in a temporary location and updates
/// the asset manager to point to that location.
///
/// @param[in] asset_manager The new asset manager to use for the running
/// root isolate.
///
/// @return If the asset manager was successfully replaced. This may fail
/// if the new asset manager is invalid.
///
bool UpdateAssetManager(const std::shared_ptr<AssetManager>& asset_manager);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that it is time to begin working on a new
/// frame previously scheduled via a call to
/// `Engine::ScheduleFrame`. This call originates in the animator.
///
/// The frame time given as the argument indicates the point at
/// which the current frame interval began. It is very slightly
/// (because of scheduling overhead) in the past. If a new layer
/// tree is not produced and given to the raster task runner
/// within one frame interval from this point, the Flutter
/// application will jank.
///
/// If a root isolate is running, this method calls the
/// `::_beginFrame` method in `hooks.dart`. If a root isolate is
/// not running, this call does nothing.
///
/// This method encapsulates the entire UI thread frame workload.
/// The following (mis)behavior in the functioning of the method
/// will cause the jank in the Flutter application:
/// * The time taken by this method to create a layer-tree exceeds
/// one frame interval (for example, 16.66 ms on a 60Hz
/// display).
  ///             * The time taken by this method to generate a new layer-tree
/// causes the current layer-tree pipeline depth to change. To
/// illustrate this point, note that maximum pipeline depth used
/// by layer tree in the engine is 2. If both the UI and GPU
/// task runner tasks finish within one frame interval, the
/// pipeline depth is one. If the UI thread happens to be
/// working on a frame when the raster thread is still not done
/// with the previous frame, the pipeline depth is 2. When the
/// pipeline depth changes from 1 to 2, animations and UI
/// interactions that cause the generation of the new layer tree
/// appropriate for (frame_time + one frame interval) will
/// actually end up at (frame_time + two frame intervals). This
/// is not what code running on the UI thread expected would
/// happen. This causes perceptible jank.
///
/// @param[in] frame_time The point at which the current frame interval
/// began. May be used by animation interpolators,
/// physics simulations, etc..
///
/// @param[in] frame_number The frame number recorded by the animator. Used
/// by the framework to associate frame specific
/// debug information with frame timings and timeline
/// events.
void BeginFrame(fml::TimePoint frame_time, uint64_t frame_number);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the UI task runner is not expected to
/// undertake a new frame workload till a specified timepoint. The
/// timepoint is measured in microseconds against the system's
/// monotonic clock. It is recommended that the clock be accessed
/// via `Dart_TimelineGetMicros` from `dart_api.h` for
/// consistency. In reality, the clocks used by Dart, FML and
/// std::steady_clock are all the same and the timepoints can be
  ///             converted from one clock type to another.
///
/// The Dart VM uses this notification to schedule book-keeping
/// tasks that may include a garbage collection. In this way, it
/// is less likely for the VM to perform such (potentially long
/// running) tasks in the middle of a frame workload.
///
/// This notification is advisory. That is, not providing this
/// notification does not mean garbage collection is postponed
/// till this call is made. If this notification is not provided,
/// garbage collection will happen based on the usual heuristics
/// used by the Dart VM.
///
/// Currently, this idle notification is delivered to the engine
/// at two points. Once, the deadline is calculated based on how
/// much time in the current frame interval is left on the UI task
/// runner. Since the next frame workload cannot begin till at
/// least the next callback from the vsync waiter, this period may
  ///             be used as a "small" idle notification. On the other
/// hand, if no more frames are scheduled, a large (but arbitrary)
/// idle notification deadline is chosen for a "big" idle
/// notification. Again, this notification does not guarantee
/// collection, just gives the Dart VM more hints about opportune
/// moments to perform collections.
///
///
/// @param[in] deadline The deadline is used by the VM to determine if the
/// corresponding sweep can be performed within the
/// deadline.
///
void NotifyIdle(fml::TimeDelta deadline);
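  //----------------------------------------------------------------------------
  /// Example (illustrative sketch only): deriving a "small" idle deadline from
  /// the time left in the current frame interval. The 60Hz interval and the
  /// `frame_start_time` bookkeeping are assumptions made for illustration.
  ///
  /// @code
  ///   fml::TimeDelta frame_interval = fml::TimeDelta::FromMicroseconds(16667);
  ///   fml::TimeDelta elapsed = fml::TimePoint::Now() - frame_start_time;
  ///   if (elapsed < frame_interval) {
  ///     engine->NotifyIdle(frame_interval - elapsed);
  ///   }
  /// @endcode
  ///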
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the attached flutter view has been
/// destroyed.
/// This enables the engine to notify the Dart VM so it can do
  ///             some cleanup activities.
void NotifyDestroyed();
//----------------------------------------------------------------------------
/// @brief Dart code cannot fully measure the time it takes for a
/// specific frame to be rendered. This is because Dart code only
/// runs on the UI task runner. That is only a small part of the
/// overall frame workload. The raster task runner frame workload
/// is executed on a thread where Dart code cannot run (and hence
/// instrument). Besides, due to the pipelined nature of rendering
/// in Flutter, there may be multiple frame workloads being
/// processed at any given time. However, for non-Timeline based
/// profiling, it is useful for trace collection and processing to
/// happen in Dart. To do this, the raster task runner frame
/// workloads need to be instrumented separately. After a set
/// number of these profiles have been gathered, they need to be
/// reported back to Dart code. The shell reports this extra
/// instrumentation information back to Dart code running on the
/// engine by invoking this method at predefined intervals.
///
/// @see `FrameTiming`
///
// TODO(chinmaygarde): The use `int64_t` is added for ease of conversion to
// Dart but hurts readability. The phases and the units of the timepoints are
// not obvious without some sleuthing. The conversion can happen at the
// native interface boundary instead.
///
/// @param[in] timings Collection of `FrameTiming::kCount` * `n` timestamps
/// for `n` frames whose timings have not been reported
/// yet. A collection of integers is reported here for
/// easier conversions to Dart objects. The timestamps
/// are measured against the system monotonic clock
/// measured in microseconds.
///
void ReportTimings(std::vector<int64_t> timings);
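  //----------------------------------------------------------------------------
  /// Example (illustrative sketch only): the flat `timings` vector carries
  /// FrameTiming::kCount entries per frame, so the k-th phase of the i-th
  /// frame lives at index `i * FrameTiming::kCount + k`. The phase constant
  /// used below is assumed to follow the FrameTiming phase enumeration.
  ///
  /// @code
  ///   const size_t frame_count = timings.size() / FrameTiming::kCount;
  ///   for (size_t i = 0; i < frame_count; i++) {
  ///     int64_t build_start_micros =
  ///         timings[i * FrameTiming::kCount + FrameTiming::kBuildStart];
  ///     // ... read the remaining phases of frame i the same way.
  ///   }
  /// @endcode
  ///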
//----------------------------------------------------------------------------
/// @brief Gets the main port of the root isolate. Since the isolate is
/// created immediately in the constructor of the engine, it is
/// possible to get its main port immediately (even before a call
/// to `Run` can be made). This is useful in registering the port
/// in a race free manner with a port nameserver.
///
/// @return The main port of the root isolate.
///
Dart_Port GetUIIsolateMainPort();
//----------------------------------------------------------------------------
/// @brief Gets the debug name of the root isolate. By default, the
/// debug name of the isolate is derived from its advisory script
/// URI, advisory main entrypoint and its main port name. For
/// example, "main.dart$main-1234" where the script URI is
/// "main.dart", the entrypoint is "main" and the port name
/// "1234". Once launched, the isolate may re-christen itself
/// using a name it selects via `setIsolateDebugName` in
/// `platform_dispatcher.dart`. This name is purely advisory and
/// only used by instrumentation and reporting purposes.
///
/// @return The debug name of the root isolate.
///
std::string GetUIIsolateName();
//----------------------------------------------------------------------------
/// @brief It is an unexpected challenge to determine when a Dart
/// application is "done". The application cannot simply terminate
/// the native process (and perhaps return an exit code) because
/// it does not have that power. After all, Flutter applications
/// reside within a host process that may have other
/// responsibilities besides just running Flutter applications.
/// Also, the `main` entry-points are run on an event loop and
/// returning from "main" (unlike in C/C++ applications) does not
/// mean termination of the process. Besides, the return value of
/// the main entrypoint is discarded.
///
/// One technique used by embedders to determine "liveness" is to
/// count the outstanding live ports dedicated to the application.
/// These ports may be live as a result of pending timers,
/// scheduled tasks, pending IO on sockets, channels open with
/// other isolates, etc.. At regular intervals (sometimes as often
/// as after the UI task runner processes any task), embedders may
/// check for the "liveness" of the application and perform
/// teardown of the embedder when no more ports are live.
///
/// @return Check if the root isolate has any live ports.
///
bool UIIsolateHasLivePorts();
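  //----------------------------------------------------------------------------
  /// Example (illustrative sketch only): an embedder-side liveness check that
  /// tears down the shell once no ports remain live. `InitiateShellTeardown`
  /// and `ScheduleNextLivenessCheck` are hypothetical embedder hooks.
  ///
  /// @code
  ///   void OnUITaskProcessed(Engine* engine) {
  ///     if (!engine->UIIsolateHasLivePorts()) {
  ///       InitiateShellTeardown();  // The Dart application is "done".
  ///     } else {
  ///       ScheduleNextLivenessCheck();  // Check again later.
  ///     }
  ///   }
  /// @endcode
  ///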
//----------------------------------------------------------------------------
/// @brief Errors that are unhandled on the Dart message loop are kept
/// for further inspection till the next unhandled error comes
/// along. This accessor returns the last unhandled error
/// encountered by the root isolate.
///
/// @return The ui isolate last error.
///
tonic::DartErrorHandleType GetUIIsolateLastError();
//----------------------------------------------------------------------------
/// @brief As described in the discussion for `UIIsolateHasLivePorts`,
/// the "done-ness" of a Dart application is tricky to ascertain
/// and the return value from the main entrypoint is discarded
/// (because the Dart isolate is still running after the main
/// entrypoint returns). But, the concept of an exit code akin to
/// those returned by native applications is still useful. Short
/// lived Dart applications (usually tests), emulate this by
/// setting a per isolate "return value" and then indicating their
/// "done-ness" (usually via closing all live ports). This
  ///             accessor returns that "return value" if present.
///
/// @see `UIIsolateHasLivePorts`
///
/// @return The return code (if specified) by the isolate.
///
std::optional<uint32_t> GetUIIsolateReturnCode();
//----------------------------------------------------------------------------
/// @brief Notify the Flutter application that a new view is available.
///
/// A view must be added before other methods can refer to it,
/// including the implicit view. Adding a view that already exists
/// triggers an assertion.
///
/// @param[in] view_id The ID of the new view.
/// @param[in] viewport_metrics The initial viewport metrics for the view.
///
void AddView(int64_t view_id, const ViewportMetrics& view_metrics);
//----------------------------------------------------------------------------
/// @brief Notify the Flutter application that a view is no
/// longer available.
///
/// Removing a view that does not exist triggers an assertion.
///
/// The implicit view (kFlutterImplicitViewId) should never be
/// removed. Doing so triggers an assertion.
///
/// @param[in] view_id The ID of the view.
///
/// @return Whether the view was removed.
///
bool RemoveView(int64_t view_id);
//----------------------------------------------------------------------------
/// @brief Updates the viewport metrics for a view. The viewport metrics
/// detail the size of the rendering viewport in texels as well as
/// edge insets if present.
///
/// @see `ViewportMetrics`
///
/// @param[in] view_id The ID for the view that `metrics` describes.
/// @param[in] metrics The metrics.
///
void SetViewportMetrics(int64_t view_id, const ViewportMetrics& metrics);
//----------------------------------------------------------------------------
/// @brief Updates the display metrics for the currently running Flutter
/// application.
///
/// @param[in] displays A complete list of displays
///
void SetDisplays(const std::vector<DisplayData>& displays);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the embedder has sent it a message.
/// This call originates in the platform view and has been
/// forwarded to the engine on the UI task runner here.
///
/// @param[in] message The message sent from the embedder to the Dart
/// application.
///
void DispatchPlatformMessage(std::unique_ptr<PlatformMessage> message);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the embedder has sent it a pointer
/// data packet. A pointer data packet may contain multiple
/// input events. This call originates in the platform view and
/// the shell has forwarded the same to the engine on the UI task
/// runner here.
///
/// @param[in] packet The pointer data packet containing multiple
/// input events.
/// @param[in] trace_flow_id The trace flow identifier associated with the
/// pointer data packet. The engine uses this trace
/// identifier to connect trace flows in the
/// timeline from the input event to the
/// frames generated due to those input events.
/// These flows are tagged as "PointerEvent" in the
/// timeline and allow grouping frames and input
/// events into logical chunks.
///
void DispatchPointerDataPacket(std::unique_ptr<PointerDataPacket> packet,
uint64_t trace_flow_id);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the embedder encountered an
/// accessibility related action on the specified node. This call
/// originates on the platform view and has been forwarded to the
/// engine here on the UI task runner by the shell.
///
/// @param[in] node_id The identifier of the accessibility node.
/// @param[in] action The accessibility related action performed on the
/// node of the specified ID.
/// @param[in] args Optional data that applies to the specified action.
///
void DispatchSemanticsAction(int node_id,
SemanticsAction action,
fml::MallocMapping args);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the embedder has expressed an opinion
/// about whether the accessibility tree should be generated or
/// not. This call originates in the platform view and is
/// forwarded to the engine here on the UI task runner by the
/// shell.
///
/// @param[in] enabled Whether the accessibility tree is enabled or
/// disabled.
///
void SetSemanticsEnabled(bool enabled);
//----------------------------------------------------------------------------
/// @brief Notifies the engine that the embedder has expressed an opinion
  ///             about which flags to set on the accessibility tree. This
/// flag originates in the platform view and is forwarded to the
/// engine here on the UI task runner by the shell.
///
/// The engine does not care about the accessibility feature flags
/// as all it does is forward this information from the embedder
/// to the framework. However, curious readers may refer to
/// `AccessibilityFeatures` in `window.dart` for currently
/// supported accessibility feature flags.
///
/// @param[in] flags The features to enable in the accessibility tree.
///
void SetAccessibilityFeatures(int32_t flags);
// |RuntimeDelegate|
void ScheduleFrame(bool regenerate_layer_trees) override;
/// Schedule a frame with the default parameter of regenerating the layer
/// tree.
void ScheduleFrame() { ScheduleFrame(true); }
// |RuntimeDelegate|
void OnAllViewsRendered() override;
// |RuntimeDelegate|
FontCollection& GetFontCollection() override;
// |RuntimeDelegate|
std::shared_ptr<AssetManager> GetAssetManager() override;
// Return the weak_ptr of ImageDecoder.
fml::WeakPtr<ImageDecoder> GetImageDecoderWeakPtr();
//----------------------------------------------------------------------------
/// @brief Get the `ImageGeneratorRegistry` associated with the current
/// engine.
///
/// @return The engine's `ImageGeneratorRegistry`.
///
fml::WeakPtr<ImageGeneratorRegistry> GetImageGeneratorRegistry();
// |PointerDataDispatcher::Delegate|
void DoDispatchPacket(std::unique_ptr<PointerDataPacket> packet,
uint64_t trace_flow_id) override;
// |PointerDataDispatcher::Delegate|
void ScheduleSecondaryVsyncCallback(uintptr_t id,
const fml::closure& callback) override;
//----------------------------------------------------------------------------
/// @brief Get the last Entrypoint that was used in the RunConfiguration
/// when |Engine::Run| was called.
///
const std::string& GetLastEntrypoint() const;
//----------------------------------------------------------------------------
/// @brief Get the last Entrypoint Library that was used in the
/// RunConfiguration when |Engine::Run| was called.
///
const std::string& GetLastEntrypointLibrary() const;
//----------------------------------------------------------------------------
/// @brief Get the last Entrypoint Arguments that was used in the
  ///             RunConfiguration when |Engine::Run| was called. This is only
/// valid in debug mode.
///
const std::vector<std::string>& GetLastEntrypointArgs() const;
//----------------------------------------------------------------------------
/// @brief Getter for the initial route. This can be set with a platform
/// message.
///
const std::string& InitialRoute() const { return initial_route_; }
//--------------------------------------------------------------------------
/// @brief Loads the Dart shared library into the Dart VM. When the
/// Dart library is loaded successfully, the Dart future
/// returned by the originating loadLibrary() call completes.
///
/// The Dart compiler may generate separate shared libraries
/// files called 'loading units' when libraries are imported
  ///             as deferred. Each of these shared libraries is identified
/// by a unique loading unit id. Callers should open and resolve
/// a SymbolMapping from the shared library. The Mappings should
/// be moved into this method, as ownership will be assumed by the
/// dart root isolate after successful loading and released after
/// shutdown of the root isolate. The loading unit may not be
/// used after isolate shutdown. If loading fails, the mappings
/// will be released.
///
/// This method is paired with a RequestDartDeferredLibrary
/// invocation that provides the embedder with the loading unit id
/// of the deferred library to load.
///
///
/// @param[in] loading_unit_id The unique id of the deferred library's
/// loading unit, as passed in by
/// RequestDartDeferredLibrary.
///
/// @param[in] snapshot_data Dart snapshot data of the loading unit's
/// shared library.
///
  /// @param[in] snapshot_instructions  Dart snapshot instructions of the
  ///                                   loading unit's shared library.
///
void LoadDartDeferredLibrary(
intptr_t loading_unit_id,
std::unique_ptr<const fml::Mapping> snapshot_data,
std::unique_ptr<const fml::Mapping> snapshot_instructions);
//--------------------------------------------------------------------------
/// @brief Indicates to the dart VM that the request to load a deferred
/// library with the specified loading unit id has failed.
///
/// The dart future returned by the initiating loadLibrary() call
/// will complete with an error.
///
/// @param[in] loading_unit_id The unique id of the deferred library's
/// loading unit, as passed in by
/// RequestDartDeferredLibrary.
///
/// @param[in] error_message The error message that will appear in the
/// dart Future.
///
/// @param[in] transient A transient error is a failure due to
/// temporary conditions such as no network.
/// Transient errors allow the dart VM to
/// re-request the same deferred library and
/// loading_unit_id again. Non-transient
/// errors are permanent and attempts to
/// re-request the library will instantly
/// complete with an error.
void LoadDartDeferredLibraryError(intptr_t loading_unit_id,
const std::string& error_message,
bool transient);
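  //----------------------------------------------------------------------------
  /// Example (illustrative sketch only): the embedder flow that pairs
  /// RequestDartDeferredLibrary with the two completion paths above.
  /// `DownloadLoadingUnit` is a hypothetical embedder helper that resolves a
  /// loading unit id to snapshot mappings (or nothing on failure).
  ///
  /// @code
  ///   void OnRequestDartDeferredLibrary(Engine* engine,
  ///                                     intptr_t loading_unit_id) {
  ///     auto mappings = DownloadLoadingUnit(loading_unit_id);
  ///     if (mappings.has_value()) {
  ///       engine->LoadDartDeferredLibrary(loading_unit_id,
  ///                                       std::move(mappings->data),
  ///                                       std::move(mappings->instructions));
  ///     } else {
  ///       engine->LoadDartDeferredLibraryError(
  ///           loading_unit_id, "download failed", /*transient=*/true);
  ///     }
  ///   }
  /// @endcode
  ///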
//--------------------------------------------------------------------------
/// @brief Accessor for the RuntimeController.
///
const RuntimeController* GetRuntimeController() const {
return runtime_controller_.get();
}
const std::weak_ptr<VsyncWaiter> GetVsyncWaiter() const;
//--------------------------------------------------------------------------
/// @brief Shuts down all registered platform isolates. Must be called
/// from the platform thread.
///
void ShutdownPlatformIsolates();
private:
// |RuntimeDelegate|
std::string DefaultRouteName() override;
// |RuntimeDelegate|
void Render(int64_t view_id,
std::unique_ptr<flutter::LayerTree> layer_tree,
float device_pixel_ratio) override;
// |RuntimeDelegate|
void UpdateSemantics(SemanticsNodeUpdates update,
CustomAccessibilityActionUpdates actions) override;
// |RuntimeDelegate|
void HandlePlatformMessage(std::unique_ptr<PlatformMessage> message) override;
// |RuntimeDelegate|
void OnRootIsolateCreated() override;
// |RuntimeDelegate|
void UpdateIsolateDescription(const std::string isolate_name,
int64_t isolate_port) override;
// |RuntimeDelegate|
std::unique_ptr<std::vector<std::string>> ComputePlatformResolvedLocale(
const std::vector<std::string>& supported_locale_data) override;
// |RuntimeDelegate|
void RequestDartDeferredLibrary(intptr_t loading_unit_id) override;
// |RuntimeDelegate|
std::weak_ptr<PlatformMessageHandler> GetPlatformMessageHandler()
const override;
// |RuntimeDelegate|
void SendChannelUpdate(std::string name, bool listening) override;
// |RuntimeDelegate|
double GetScaledFontSize(double unscaled_font_size,
int configuration_id) const override;
void SetNeedsReportTimings(bool value) override;
bool HandleLifecyclePlatformMessage(PlatformMessage* message);
bool HandleNavigationPlatformMessage(
std::unique_ptr<PlatformMessage> message);
bool HandleLocalizationPlatformMessage(PlatformMessage* message);
void HandleSettingsPlatformMessage(PlatformMessage* message);
void HandleAssetPlatformMessage(std::unique_ptr<PlatformMessage> message);
bool GetAssetAsBuffer(const std::string& name, std::vector<uint8_t>* data);
friend class testing::ShellTest;
Engine::Delegate& delegate_;
const Settings settings_;
std::unique_ptr<Animator> animator_;
std::unique_ptr<RuntimeController> runtime_controller_;
// The pointer_data_dispatcher_ depends on animator_ and runtime_controller_.
// So it should be defined after them to ensure that pointer_data_dispatcher_
// is destructed first.
std::unique_ptr<PointerDataDispatcher> pointer_data_dispatcher_;
std::string last_entry_point_;
std::string last_entry_point_library_;
std::vector<std::string> last_entry_point_args_;
std::string initial_route_;
std::shared_ptr<AssetManager> asset_manager_;
std::shared_ptr<FontCollection> font_collection_;
const std::unique_ptr<ImageDecoder> image_decoder_;
ImageGeneratorRegistry image_generator_registry_;
TaskRunners task_runners_;
fml::WeakPtrFactory<Engine> weak_factory_; // Must be the last member.
FML_DISALLOW_COPY_AND_ASSIGN(Engine);
};
} // namespace flutter
#endif // FLUTTER_SHELL_COMMON_ENGINE_H_
| engine/shell/common/engine.h/0 | {
"file_path": "engine/shell/common/engine.h",
"repo_id": "engine",
"token_count": 19329
} | 325 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/rasterizer.h"
#include <algorithm>
#include <memory>
#include <utility>
#include "display_list/dl_builder.h"
#include "flow/frame_timings.h"
#include "flutter/common/constants.h"
#include "flutter/common/graphics/persistent_cache.h"
#include "flutter/flow/layers/offscreen_surface.h"
#include "flutter/fml/time/time_delta.h"
#include "flutter/fml/time/time_point.h"
#include "flutter/shell/common/base64.h"
#include "flutter/shell/common/serialization_callbacks.h"
#include "fml/closure.h"
#include "fml/make_copyable.h"
#include "fml/synchronization/waitable_event.h"
#include "third_party/skia/include/core/SkColorSpace.h"
#include "third_party/skia/include/core/SkData.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkImageInfo.h"
#include "third_party/skia/include/core/SkMatrix.h"
#include "third_party/skia/include/core/SkPictureRecorder.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/core/SkSerialProcs.h"
#include "third_party/skia/include/core/SkSize.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/encode/SkPngEncoder.h"
#include "third_party/skia/include/gpu/GpuTypes.h"
#include "third_party/skia/include/gpu/GrBackendSurface.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
#include "third_party/skia/include/gpu/GrTypes.h"
#include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h"
#if IMPELLER_SUPPORTS_RENDERING
#include "impeller/aiks/aiks_context.h" // nogncheck
#include "impeller/core/formats.h" // nogncheck
#include "impeller/display_list/dl_dispatcher.h" // nogncheck
#endif
namespace flutter {
// The rasterizer will tell Skia to purge cached resources that have not been
// used within this interval.
static constexpr std::chrono::milliseconds kSkiaCleanupExpiration(15000);
Rasterizer::Rasterizer(Delegate& delegate,
MakeGpuImageBehavior gpu_image_behavior)
: delegate_(delegate),
gpu_image_behavior_(gpu_image_behavior),
compositor_context_(std::make_unique<flutter::CompositorContext>(*this)),
snapshot_controller_(
SnapshotController::Make(*this, delegate.GetSettings())),
weak_factory_(this) {
FML_DCHECK(compositor_context_);
}
Rasterizer::~Rasterizer() = default;
fml::TaskRunnerAffineWeakPtr<Rasterizer> Rasterizer::GetWeakPtr() const {
return weak_factory_.GetWeakPtr();
}
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> Rasterizer::GetSnapshotDelegate()
const {
return weak_factory_.GetWeakPtr();
}
void Rasterizer::SetImpellerContext(
std::weak_ptr<impeller::Context> impeller_context) {
impeller_context_ = std::move(impeller_context);
}
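// Takes ownership of the on-screen surface. Any resource cache limit that was
// requested before the surface existed is applied now, the compositor context
// is notified that a GrContext is available, and a raster thread merger is
// created when the external view embedder supports dynamic thread merging.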
void Rasterizer::Setup(std::unique_ptr<Surface> surface) {
surface_ = std::move(surface);
if (max_cache_bytes_.has_value()) {
SetResourceCacheMaxBytes(max_cache_bytes_.value(),
user_override_resource_cache_bytes_);
}
auto context_switch = surface_->MakeRenderContextCurrent();
if (context_switch->GetResult()) {
compositor_context_->OnGrContextCreated();
}
if (external_view_embedder_ &&
external_view_embedder_->SupportsDynamicThreadMerging() &&
!raster_thread_merger_) {
const auto platform_id =
delegate_.GetTaskRunners().GetPlatformTaskRunner()->GetTaskQueueId();
const auto gpu_id =
delegate_.GetTaskRunners().GetRasterTaskRunner()->GetTaskQueueId();
raster_thread_merger_ = fml::RasterThreadMerger::CreateOrShareThreadMerger(
delegate_.GetParentRasterThreadMerger(), platform_id, gpu_id);
}
if (raster_thread_merger_) {
raster_thread_merger_->SetMergeUnmergeCallback([this]() {
// Clear the GL context after the thread configuration has changed.
if (surface_) {
surface_->ClearRenderContext();
}
});
}
}
void Rasterizer::TeardownExternalViewEmbedder() {
if (external_view_embedder_) {
external_view_embedder_->Teardown();
}
}
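// Releases the on-screen surface. Unlocked GPU resources are purged while the
// render context is still current, the per-view records are dropped, and the
// raster thread merger (if merged) is unmerged so the platform thread is no
// longer tied to the raster thread.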
void Rasterizer::Teardown() {
is_torn_down_ = true;
if (surface_) {
auto context_switch = surface_->MakeRenderContextCurrent();
if (context_switch->GetResult()) {
compositor_context_->OnGrContextDestroyed();
if (auto* context = surface_->GetContext()) {
context->purgeUnlockedResources(GrPurgeResourceOptions::kAllResources);
}
}
surface_.reset();
}
view_records_.clear();
if (raster_thread_merger_.get() != nullptr &&
raster_thread_merger_.get()->IsMerged()) {
FML_DCHECK(raster_thread_merger_->IsEnabled());
raster_thread_merger_->UnMergeNowIfLastOne();
raster_thread_merger_->SetMergeUnmergeCallback(nullptr);
}
}
bool Rasterizer::IsTornDown() {
return is_torn_down_;
}
std::optional<DrawSurfaceStatus> Rasterizer::GetLastDrawStatus(
int64_t view_id) {
auto found = view_records_.find(view_id);
if (found != view_records_.end()) {
return found->second.last_draw_status;
} else {
return std::optional<DrawSurfaceStatus>();
}
}
void Rasterizer::EnableThreadMergerIfNeeded() {
if (raster_thread_merger_) {
raster_thread_merger_->Enable();
}
}
void Rasterizer::DisableThreadMergerIfNeeded() {
if (raster_thread_merger_) {
raster_thread_merger_->Disable();
}
}
void Rasterizer::NotifyLowMemoryWarning() const {
if (!surface_) {
FML_DLOG(INFO)
<< "Rasterizer::NotifyLowMemoryWarning called with no surface.";
return;
}
auto context = surface_->GetContext();
if (!context) {
FML_DLOG(INFO)
<< "Rasterizer::NotifyLowMemoryWarning called with no GrContext.";
return;
}
auto context_switch = surface_->MakeRenderContextCurrent();
if (!context_switch->GetResult()) {
return;
}
context->performDeferredCleanup(std::chrono::milliseconds(0));
}
void Rasterizer::CollectView(int64_t view_id) {
view_records_.erase(view_id);
}
std::shared_ptr<flutter::TextureRegistry> Rasterizer::GetTextureRegistry() {
return compositor_context_->texture_registry();
}
GrDirectContext* Rasterizer::GetGrContext() {
return surface_ ? surface_->GetContext() : nullptr;
}
flutter::LayerTree* Rasterizer::GetLastLayerTree(int64_t view_id) {
auto found = view_records_.find(view_id);
if (found == view_records_.end()) {
return nullptr;
}
auto& last_task = found->second.last_successful_task;
if (last_task == nullptr) {
return nullptr;
}
return last_task->layer_tree.get();
}
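// Re-rasterizes the most recent successful layer tree task of every view
// without consuming anything from the frame pipeline. The stored tasks are
// moved out of the view records, and the external view embedder receives its
// usual end-of-frame cleanup.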
void Rasterizer::DrawLastLayerTrees(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder) {
if (!surface_) {
return;
}
std::vector<std::unique_ptr<LayerTreeTask>> tasks;
for (auto& [view_id, view_record] : view_records_) {
if (view_record.last_successful_task) {
tasks.push_back(std::move(view_record.last_successful_task));
}
}
if (tasks.empty()) {
return;
}
DoDrawResult result =
DrawToSurfaces(*frame_timings_recorder, std::move(tasks));
// EndFrame should perform cleanups for the external_view_embedder.
if (external_view_embedder_ && external_view_embedder_->GetUsedThisFrame()) {
bool should_resubmit_frame = ShouldResubmitFrame(result);
external_view_embedder_->SetUsedThisFrame(false);
external_view_embedder_->EndFrame(should_resubmit_frame,
raster_thread_merger_);
}
}
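// Consumes a single frame item from the pipeline and rasterizes it on the
// raster task runner. A frame that must be resubmitted is pushed back to the
// front of the pipeline so it is retried on the next attempt, and another draw
// is scheduled whenever more items remain to be consumed.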
DrawStatus Rasterizer::Draw(const std::shared_ptr<FramePipeline>& pipeline) {
TRACE_EVENT0("flutter", "GPURasterizer::Draw");
if (raster_thread_merger_ &&
!raster_thread_merger_->IsOnRasterizingThread()) {
    // We yield and let this frame be serviced on the right thread.
return DrawStatus::kYielded;
}
FML_DCHECK(delegate_.GetTaskRunners()
.GetRasterTaskRunner()
->RunsTasksOnCurrentThread());
DoDrawResult draw_result;
FramePipeline::Consumer consumer = [&draw_result,
this](std::unique_ptr<FrameItem> item) {
draw_result = DoDraw(std::move(item->frame_timings_recorder),
std::move(item->layer_tree_tasks));
};
PipelineConsumeResult consume_result = pipeline->Consume(consumer);
if (consume_result == PipelineConsumeResult::NoneAvailable) {
return DrawStatus::kPipelineEmpty;
}
  // If the raster status is to resubmit the frame, we push the frame to the
  // front of the queue and also change the consume status to more available.
bool should_resubmit_frame = ShouldResubmitFrame(draw_result);
if (should_resubmit_frame) {
FML_CHECK(draw_result.resubmitted_item);
auto front_continuation = pipeline->ProduceIfEmpty();
PipelineProduceResult pipeline_result =
front_continuation.Complete(std::move(draw_result.resubmitted_item));
if (pipeline_result.success) {
consume_result = PipelineConsumeResult::MoreAvailable;
}
} else if (draw_result.status == DoDrawStatus::kEnqueuePipeline) {
consume_result = PipelineConsumeResult::MoreAvailable;
}
// EndFrame should perform cleanups for the external_view_embedder.
if (external_view_embedder_ && external_view_embedder_->GetUsedThisFrame()) {
external_view_embedder_->SetUsedThisFrame(false);
external_view_embedder_->EndFrame(should_resubmit_frame,
raster_thread_merger_);
}
// Consume as many pipeline items as possible. But yield the event loop
// between successive tries.
switch (consume_result) {
case PipelineConsumeResult::MoreAvailable: {
delegate_.GetTaskRunners().GetRasterTaskRunner()->PostTask(
[weak_this = weak_factory_.GetWeakPtr(), pipeline]() {
if (weak_this) {
weak_this->Draw(pipeline);
}
});
break;
}
default:
break;
}
return ToDrawStatus(draw_result.status);
}
bool Rasterizer::ShouldResubmitFrame(const DoDrawResult& result) {
if (result.resubmitted_item) {
FML_CHECK(!result.resubmitted_item->layer_tree_tasks.empty());
return true;
}
return false;
}
DrawStatus Rasterizer::ToDrawStatus(DoDrawStatus status) {
switch (status) {
case DoDrawStatus::kEnqueuePipeline:
return DrawStatus::kDone;
case DoDrawStatus::kNotSetUp:
return DrawStatus::kNotSetUp;
case DoDrawStatus::kGpuUnavailable:
return DrawStatus::kGpuUnavailable;
case DoDrawStatus::kDone:
return DrawStatus::kDone;
}
FML_UNREACHABLE();
}
namespace {
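// Renders the display list into a CPU-backed SkSurface and returns the result
// as a raster-backed image. This is the fallback used when the GPU is
// unavailable or when bitmap output was explicitly requested.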
std::unique_ptr<SnapshotDelegate::GpuImageResult> MakeBitmapImage(
const sk_sp<DisplayList>& display_list,
const SkImageInfo& image_info) {
FML_DCHECK(display_list);
// Use 16384 as a proxy for the maximum texture size for a GPU image.
  // This is meant to be large enough to avoid false positives in test
  // contexts, but not so artificially large as to be completely unrealistic on
  // any platform.
// This limit is taken from the Metal specification. D3D, Vulkan, and GL
// generally have lower limits.
if (image_info.width() > 16384 || image_info.height() > 16384) {
return std::make_unique<SnapshotDelegate::GpuImageResult>(
GrBackendTexture(), nullptr, nullptr,
"unable to create bitmap render target at specified size " +
std::to_string(image_info.width()) + "x" +
std::to_string(image_info.height()));
  }
sk_sp<SkSurface> surface = SkSurfaces::Raster(image_info);
auto canvas = DlSkCanvasAdapter(surface->getCanvas());
canvas.Clear(DlColor::kTransparent());
canvas.DrawDisplayList(display_list);
sk_sp<SkImage> image = surface->makeImageSnapshot();
return std::make_unique<SnapshotDelegate::GpuImageResult>(
GrBackendTexture(), nullptr, image,
image ? "" : "Unable to create image");
}
} // namespace
std::unique_ptr<Rasterizer::GpuImageResult> Rasterizer::MakeSkiaGpuImage(
sk_sp<DisplayList> display_list,
const SkImageInfo& image_info) {
TRACE_EVENT0("flutter", "Rasterizer::MakeGpuImage");
FML_DCHECK(display_list);
std::unique_ptr<SnapshotDelegate::GpuImageResult> result;
delegate_.GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers()
.SetIfTrue([&result, &image_info, &display_list] {
// TODO(dnfield): This isn't safe if display_list contains any GPU
// resources like an SkImage_gpu.
result = MakeBitmapImage(display_list, image_info);
})
.SetIfFalse([&result, &image_info, &display_list,
surface = surface_.get(),
gpu_image_behavior = gpu_image_behavior_] {
if (!surface ||
gpu_image_behavior == MakeGpuImageBehavior::kBitmap) {
// TODO(dnfield): This isn't safe if display_list contains any GPU
// resources like an SkImage_gpu.
result = MakeBitmapImage(display_list, image_info);
return;
}
auto context_switch = surface->MakeRenderContextCurrent();
if (!context_switch->GetResult()) {
result = MakeBitmapImage(display_list, image_info);
return;
}
auto* context = surface->GetContext();
if (!context) {
result = MakeBitmapImage(display_list, image_info);
return;
}
GrBackendTexture texture = context->createBackendTexture(
image_info.width(), image_info.height(), image_info.colorType(),
skgpu::Mipmapped::kNo, GrRenderable::kYes);
if (!texture.isValid()) {
result = std::make_unique<SnapshotDelegate::GpuImageResult>(
GrBackendTexture(), nullptr, nullptr,
"unable to create texture render target at specified size " +
std::to_string(image_info.width()) + "x" +
std::to_string(image_info.height()));
return;
}
sk_sp<SkSurface> sk_surface = SkSurfaces::WrapBackendTexture(
context, texture, kTopLeft_GrSurfaceOrigin, /*sampleCnt=*/0,
image_info.colorType(), image_info.refColorSpace(), nullptr);
if (!sk_surface) {
result = std::make_unique<SnapshotDelegate::GpuImageResult>(
GrBackendTexture(), nullptr, nullptr,
"unable to create rendering surface for image");
return;
}
auto canvas = DlSkCanvasAdapter(sk_surface->getCanvas());
canvas.Clear(DlColor::kTransparent());
canvas.DrawDisplayList(display_list);
result = std::make_unique<SnapshotDelegate::GpuImageResult>(
texture, sk_ref_sp(context), nullptr, "");
}));
return result;
}
sk_sp<DlImage> Rasterizer::MakeRasterSnapshot(sk_sp<DisplayList> display_list,
SkISize picture_size) {
return snapshot_controller_->MakeRasterSnapshot(display_list, picture_size);
}
sk_sp<SkImage> Rasterizer::ConvertToRasterImage(sk_sp<SkImage> image) {
TRACE_EVENT0("flutter", __FUNCTION__);
return snapshot_controller_->ConvertToRasterImage(image);
}
fml::Milliseconds Rasterizer::GetFrameBudget() const {
return delegate_.GetFrameBudget();
}
Rasterizer::DoDrawResult Rasterizer::DoDraw(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks) {
TRACE_EVENT_WITH_FRAME_NUMBER(frame_timings_recorder, "flutter",
"Rasterizer::DoDraw", /*flow_id_count=*/0,
/*flow_ids=*/nullptr);
FML_DCHECK(delegate_.GetTaskRunners()
.GetRasterTaskRunner()
->RunsTasksOnCurrentThread());
frame_timings_recorder->AssertInState(FrameTimingsRecorder::State::kBuildEnd);
if (tasks.empty()) {
return DoDrawResult{DoDrawStatus::kDone};
}
if (!surface_) {
return DoDrawResult{DoDrawStatus::kNotSetUp};
}
PersistentCache* persistent_cache = PersistentCache::GetCacheForProcess();
persistent_cache->ResetStoredNewShaders();
DoDrawResult result =
DrawToSurfaces(*frame_timings_recorder, std::move(tasks));
FML_DCHECK(result.status != DoDrawStatus::kEnqueuePipeline);
if (result.status == DoDrawStatus::kGpuUnavailable) {
return DoDrawResult{DoDrawStatus::kGpuUnavailable};
}
if (persistent_cache->IsDumpingSkp() &&
persistent_cache->StoredNewShaders()) {
auto screenshot =
ScreenshotLastLayerTree(ScreenshotType::SkiaPicture, false);
persistent_cache->DumpSkp(*screenshot.data);
}
// TODO(liyuqian): in Fuchsia, the rasterization doesn't finish when
// Rasterizer::DoDraw finishes. Future work is needed to adapt the timestamp
// for Fuchsia to capture SceneUpdateContext::ExecutePaintTasks.
delegate_.OnFrameRasterized(frame_timings_recorder->GetRecordedTime());
// SceneDisplayLag events are disabled on Fuchsia.
// see: https://github.com/flutter/flutter/issues/56598
#if !defined(OS_FUCHSIA)
const fml::TimePoint raster_finish_time =
frame_timings_recorder->GetRasterEndTime();
fml::TimePoint frame_target_time =
frame_timings_recorder->GetVsyncTargetTime();
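  // If rasterization finished after its vsync deadline, estimate how many
  // vsync intervals were missed and record the lag as an async trace event so
  // it shows up as a "SceneDisplayLag" span in the timeline.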
if (raster_finish_time > frame_target_time) {
fml::TimePoint latest_frame_target_time =
delegate_.GetLatestFrameTargetTime();
const auto frame_budget_millis = delegate_.GetFrameBudget().count();
if (latest_frame_target_time < raster_finish_time) {
latest_frame_target_time =
latest_frame_target_time +
fml::TimeDelta::FromMillisecondsF(frame_budget_millis);
}
const auto frame_lag =
(latest_frame_target_time - frame_target_time).ToMillisecondsF();
const int vsync_transitions_missed = round(frame_lag / frame_budget_millis);
fml::tracing::TraceEventAsyncComplete(
"flutter", // category
"SceneDisplayLag", // name
raster_finish_time, // begin_time
latest_frame_target_time, // end_time
"frame_target_time", // arg_key_1
frame_target_time, // arg_val_1
"current_frame_target_time", // arg_key_2
latest_frame_target_time, // arg_val_2
"vsync_transitions_missed", // arg_key_3
vsync_transitions_missed // arg_val_3
);
}
#endif
// Pipeline pressure is applied from a couple of places:
// rasterizer: When there are more items as of the time of Consume.
  // animator (via shell): A frame gets produced every vsync.
  // Enqueuing here is to account for the following scenario:
// T = 1
// - one item (A) in the pipeline
// - rasterizer starts (and merges the threads)
// - pipeline consume result says no items to process
// T = 2
// - animator produces (B) to the pipeline
// - applies pipeline pressure via platform thread.
// T = 3
  // - rasterization finished (and un-merges the threads)
  // - |Draw| for B yields as it's on the wrong thread.
// This enqueue ensures that we attempt to consume from the right
// thread one more time after un-merge.
if (raster_thread_merger_) {
if (raster_thread_merger_->DecrementLease() ==
fml::RasterThreadStatus::kUnmergedNow) {
return DoDrawResult{
.status = DoDrawStatus::kEnqueuePipeline,
.resubmitted_item = std::move(result.resubmitted_item),
};
}
}
return result;
}
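// Draws the given layer tree tasks to their surfaces. When the surface does
// not allow drawing while the GPU is disabled, the work is routed through the
// GPU-availability sync switch and reported as kGpuUnavailable if the GPU is
// currently off-limits.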
Rasterizer::DoDrawResult Rasterizer::DrawToSurfaces(
FrameTimingsRecorder& frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks) {
TRACE_EVENT0("flutter", "Rasterizer::DrawToSurfaces");
FML_DCHECK(surface_);
frame_timings_recorder.AssertInState(FrameTimingsRecorder::State::kBuildEnd);
DoDrawResult result{
.status = DoDrawStatus::kDone,
};
if (surface_->AllowsDrawingWhenGpuDisabled()) {
result.resubmitted_item =
DrawToSurfacesUnsafe(frame_timings_recorder, std::move(tasks));
} else {
delegate_.GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers()
.SetIfTrue([&] {
result.status = DoDrawStatus::kGpuUnavailable;
frame_timings_recorder.RecordRasterStart(fml::TimePoint::Now());
frame_timings_recorder.RecordRasterEnd();
})
.SetIfFalse([&] {
result.resubmitted_item = DrawToSurfacesUnsafe(
frame_timings_recorder, std::move(tasks));
}));
}
frame_timings_recorder.AssertInState(FrameTimingsRecorder::State::kRasterEnd);
return result;
}
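// "Unsafe" because this helper assumes it is allowed to touch the GPU: callers
// must either hold a surface that permits drawing with the GPU disabled or
// have already consulted the GPU-availability sync switch (see
// DrawToSurfaces). Returns the tasks that need to be resubmitted, or nullptr
// if none do.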
std::unique_ptr<FrameItem> Rasterizer::DrawToSurfacesUnsafe(
FrameTimingsRecorder& frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks) {
compositor_context_->ui_time().SetLapTime(
frame_timings_recorder.GetBuildDuration());
// First traverse: Filter out discarded trees
auto task_iter = tasks.begin();
while (task_iter != tasks.end()) {
LayerTreeTask& task = **task_iter;
if (delegate_.ShouldDiscardLayerTree(task.view_id, *task.layer_tree)) {
EnsureViewRecord(task.view_id).last_draw_status =
DrawSurfaceStatus::kDiscarded;
task_iter = tasks.erase(task_iter);
} else {
++task_iter;
}
}
if (tasks.empty()) {
frame_timings_recorder.RecordRasterStart(fml::TimePoint::Now());
frame_timings_recorder.RecordRasterEnd();
return nullptr;
}
if (external_view_embedder_) {
FML_DCHECK(!external_view_embedder_->GetUsedThisFrame());
external_view_embedder_->SetUsedThisFrame(true);
external_view_embedder_->BeginFrame(surface_->GetContext(),
raster_thread_merger_);
}
std::optional<fml::TimePoint> presentation_time = std::nullopt;
// TODO (https://github.com/flutter/flutter/issues/105596): this can be in
// the past and might need to get snapped to future as this frame could
// have been resubmitted. `presentation_time` on SubmitInfo is not set
// in this case.
{
const auto vsync_target_time = frame_timings_recorder.GetVsyncTargetTime();
if (vsync_target_time > fml::TimePoint::Now()) {
presentation_time = vsync_target_time;
}
}
frame_timings_recorder.RecordRasterStart(fml::TimePoint::Now());
// Second traverse: draw all layer trees.
std::vector<std::unique_ptr<LayerTreeTask>> resubmitted_tasks;
for (std::unique_ptr<LayerTreeTask>& task : tasks) {
int64_t view_id = task->view_id;
std::unique_ptr<LayerTree> layer_tree = std::move(task->layer_tree);
float device_pixel_ratio = task->device_pixel_ratio;
DrawSurfaceStatus status = DrawToSurfaceUnsafe(
view_id, *layer_tree, device_pixel_ratio, presentation_time);
FML_DCHECK(status != DrawSurfaceStatus::kDiscarded);
auto& view_record = EnsureViewRecord(task->view_id);
view_record.last_draw_status = status;
if (status == DrawSurfaceStatus::kSuccess) {
view_record.last_successful_task = std::make_unique<LayerTreeTask>(
view_id, std::move(layer_tree), device_pixel_ratio);
} else if (status == DrawSurfaceStatus::kRetry) {
resubmitted_tasks.push_back(std::make_unique<LayerTreeTask>(
view_id, std::move(layer_tree), device_pixel_ratio));
}
}
// TODO(dkwingsmt): Pass in raster cache(s) for all views.
// See https://github.com/flutter/flutter/issues/135530, item 4.
frame_timings_recorder.RecordRasterEnd(&compositor_context_->raster_cache());
FireNextFrameCallbackIfPresent();
if (surface_->GetContext()) {
surface_->GetContext()->performDeferredCleanup(kSkiaCleanupExpiration);
}
if (resubmitted_tasks.empty()) {
return nullptr;
} else {
return std::make_unique<FrameItem>(
std::move(resubmitted_tasks),
frame_timings_recorder.CloneUntil(
FrameTimingsRecorder::State::kBuildEnd));
}
}
/// \see Rasterizer::DrawToSurfaces
DrawSurfaceStatus Rasterizer::DrawToSurfaceUnsafe(
int64_t view_id,
flutter::LayerTree& layer_tree,
float device_pixel_ratio,
std::optional<fml::TimePoint> presentation_time) {
FML_DCHECK(surface_);
DlCanvas* embedder_root_canvas = nullptr;
if (external_view_embedder_) {
external_view_embedder_->PrepareFlutterView(
view_id, layer_tree.frame_size(), device_pixel_ratio);
// TODO(dkwingsmt): Add view ID here.
embedder_root_canvas = external_view_embedder_->GetRootCanvas();
}
// On Android, the external view embedder deletes surfaces in `BeginFrame`.
//
// Deleting a surface also clears the GL context. Therefore, acquire the
// frame after calling `BeginFrame` as this operation resets the GL context.
auto frame = surface_->AcquireFrame(layer_tree.frame_size());
if (frame == nullptr) {
return DrawSurfaceStatus::kFailed;
}
// If the external view embedder has specified an optional root surface, the
// root surface transformation is set by the embedder instead of
// having to apply it here.
SkMatrix root_surface_transformation =
embedder_root_canvas ? SkMatrix{} : surface_->GetRootTransformation();
auto root_surface_canvas =
embedder_root_canvas ? embedder_root_canvas : frame->Canvas();
auto compositor_frame = compositor_context_->AcquireFrame(
surface_->GetContext(), // skia GrContext
root_surface_canvas, // root surface canvas
external_view_embedder_.get(), // external view embedder
root_surface_transformation, // root surface transformation
true, // instrumentation enabled
frame->framebuffer_info()
.supports_readback, // surface supports pixel reads
raster_thread_merger_, // thread merger
surface_->GetAiksContext().get() // aiks context
);
if (compositor_frame) {
compositor_context_->raster_cache().BeginFrame();
std::unique_ptr<FrameDamage> damage;
    // When leaf layer tracing is enabled, we wish to repaint the whole frame
    // for accurate performance metrics.
if (frame->framebuffer_info().supports_partial_repaint &&
!layer_tree.is_leaf_layer_tracing_enabled()) {
      // Disable partial repaint if external_view_embedder_'s SubmitFlutterView
      // is involved - ExternalViewEmbedder unconditionally clears the entire
      // surface, and partial repaint with a platform view present is something
      // that still needs to be figured out.
bool force_full_repaint =
external_view_embedder_ &&
(!raster_thread_merger_ || raster_thread_merger_->IsMerged());
damage = std::make_unique<FrameDamage>();
auto existing_damage = frame->framebuffer_info().existing_damage;
if (existing_damage.has_value() && !force_full_repaint) {
damage->SetPreviousLayerTree(GetLastLayerTree(view_id));
damage->AddAdditionalDamage(existing_damage.value());
damage->SetClipAlignment(
frame->framebuffer_info().horizontal_clip_alignment,
frame->framebuffer_info().vertical_clip_alignment);
}
}
bool ignore_raster_cache = true;
if (surface_->EnableRasterCache() &&
!layer_tree.is_leaf_layer_tracing_enabled()) {
ignore_raster_cache = false;
}
RasterStatus frame_status =
compositor_frame->Raster(layer_tree, // layer tree
ignore_raster_cache, // ignore raster cache
damage.get() // frame damage
);
if (frame_status == RasterStatus::kSkipAndRetry) {
return DrawSurfaceStatus::kRetry;
}
SurfaceFrame::SubmitInfo submit_info;
submit_info.presentation_time = presentation_time;
if (damage) {
submit_info.frame_damage = damage->GetFrameDamage();
submit_info.buffer_damage = damage->GetBufferDamage();
}
frame->set_submit_info(submit_info);
if (external_view_embedder_ &&
(!raster_thread_merger_ || raster_thread_merger_->IsMerged())) {
FML_DCHECK(!frame->IsSubmitted());
external_view_embedder_->SubmitFlutterView(
surface_->GetContext(), surface_->GetAiksContext(), std::move(frame));
} else {
frame->Submit();
}
// Do not update raster cache metrics for kResubmit because that status
// indicates that the frame was not actually painted.
if (frame_status != RasterStatus::kResubmit) {
compositor_context_->raster_cache().EndFrame();
}
if (frame_status == RasterStatus::kResubmit) {
return DrawSurfaceStatus::kRetry;
} else {
FML_CHECK(frame_status == RasterStatus::kSuccess);
return DrawSurfaceStatus::kSuccess;
}
}
return DrawSurfaceStatus::kFailed;
}
Rasterizer::ViewRecord& Rasterizer::EnsureViewRecord(int64_t view_id) {
return view_records_[view_id];
}
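// Records the layer tree into an SkPicture and serializes it. On Fuchsia,
// image and typeface data are omitted from the serialized picture; on other
// platforms typeface data is embedded and images are re-encoded as PNGs.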
static sk_sp<SkData> ScreenshotLayerTreeAsPicture(
flutter::LayerTree* tree,
flutter::CompositorContext& compositor_context) {
FML_DCHECK(tree != nullptr);
SkPictureRecorder recorder;
recorder.beginRecording(
SkRect::MakeWH(tree->frame_size().width(), tree->frame_size().height()));
SkMatrix root_surface_transformation;
root_surface_transformation.reset();
DlSkCanvasAdapter canvas(recorder.getRecordingCanvas());
// TODO(amirh): figure out how to take a screenshot with embedded UIView.
// https://github.com/flutter/flutter/issues/23435
auto frame = compositor_context.AcquireFrame(nullptr, &canvas, nullptr,
root_surface_transformation,
false, true, nullptr, nullptr);
frame->Raster(*tree, true, nullptr);
#if defined(OS_FUCHSIA)
SkSerialProcs procs = {0};
procs.fImageProc = SerializeImageWithoutData;
procs.fTypefaceProc = SerializeTypefaceWithoutData;
#else
SkSerialProcs procs = {0};
procs.fTypefaceProc = SerializeTypefaceWithData;
procs.fImageProc = [](SkImage* img, void*) -> sk_sp<SkData> {
return SkPngEncoder::Encode(nullptr, img, SkPngEncoder::Options{});
};
#endif
return recorder.finishRecordingAsPicture()->serialize(&procs);
}
static void RenderFrameForScreenshot(
flutter::CompositorContext& compositor_context,
DlCanvas* canvas,
flutter::LayerTree* tree,
GrDirectContext* surface_context,
const std::shared_ptr<impeller::AiksContext>& aiks_context) {
// There is no root surface transformation for the screenshot layer. Reset
// the matrix to identity.
SkMatrix root_surface_transformation;
root_surface_transformation.reset();
auto frame = compositor_context.AcquireFrame(
/*gr_context=*/surface_context,
/*canvas=*/canvas,
/*view_embedder=*/nullptr,
/*root_surface_transformation=*/root_surface_transformation,
/*instrumentation_enabled=*/false,
/*surface_supports_readback=*/true,
/*raster_thread_merger=*/nullptr,
/*aiks_context=*/aiks_context.get());
canvas->Clear(DlColor::kTransparent());
frame->Raster(*tree, true, nullptr);
canvas->Flush();
}
#if IMPELLER_SUPPORTS_RENDERING
Rasterizer::ScreenshotFormat ToScreenshotFormat(impeller::PixelFormat format) {
switch (format) {
case impeller::PixelFormat::kUnknown:
case impeller::PixelFormat::kA8UNormInt:
case impeller::PixelFormat::kR8UNormInt:
case impeller::PixelFormat::kR8G8UNormInt:
case impeller::PixelFormat::kR8G8B8A8UNormIntSRGB:
case impeller::PixelFormat::kB8G8R8A8UNormIntSRGB:
case impeller::PixelFormat::kB10G10R10XRSRGB:
case impeller::PixelFormat::kS8UInt:
case impeller::PixelFormat::kD24UnormS8Uint:
case impeller::PixelFormat::kD32FloatS8UInt:
case impeller::PixelFormat::kR32G32B32A32Float:
case impeller::PixelFormat::kB10G10R10XR:
case impeller::PixelFormat::kB10G10R10A10XR:
FML_DCHECK(false);
return Rasterizer::ScreenshotFormat::kUnknown;
case impeller::PixelFormat::kR8G8B8A8UNormInt:
return Rasterizer::ScreenshotFormat::kR8G8B8A8UNormInt;
case impeller::PixelFormat::kB8G8R8A8UNormInt:
return Rasterizer::ScreenshotFormat::kB8G8R8A8UNormInt;
case impeller::PixelFormat::kR16G16B16A16Float:
return Rasterizer::ScreenshotFormat::kR16G16B16A16Float;
}
}
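// Rasterizes the layer tree with Impeller into an offscreen texture and blits
// the texture into a host-visible buffer so the pixels can be copied out as
// SkData. Only uncompressed output is supported on this path.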
static std::pair<sk_sp<SkData>, Rasterizer::ScreenshotFormat>
ScreenshotLayerTreeAsImageImpeller(
const std::shared_ptr<impeller::AiksContext>& aiks_context,
flutter::LayerTree* tree,
flutter::CompositorContext& compositor_context,
bool compressed) {
if (compressed) {
FML_LOG(ERROR) << "Compressed screenshots not supported for Impeller";
return {nullptr, Rasterizer::ScreenshotFormat::kUnknown};
}
DisplayListBuilder builder(SkRect::MakeSize(
SkSize::Make(tree->frame_size().fWidth, tree->frame_size().fHeight)));
RenderFrameForScreenshot(compositor_context, &builder, tree, nullptr,
aiks_context);
impeller::DlDispatcher dispatcher;
builder.Build()->Dispatch(dispatcher);
const auto& picture = dispatcher.EndRecordingAsPicture();
const auto& image = picture.ToImage(
*aiks_context,
impeller::ISize(tree->frame_size().fWidth, tree->frame_size().fHeight));
const auto& texture = image->GetTexture();
impeller::DeviceBufferDescriptor buffer_desc;
buffer_desc.storage_mode = impeller::StorageMode::kHostVisible;
buffer_desc.size =
texture->GetTextureDescriptor().GetByteSizeOfBaseMipLevel();
auto impeller_context = aiks_context->GetContext();
auto buffer =
impeller_context->GetResourceAllocator()->CreateBuffer(buffer_desc);
auto command_buffer = impeller_context->CreateCommandBuffer();
command_buffer->SetLabel("BlitTextureToBuffer Command Buffer");
auto pass = command_buffer->CreateBlitPass();
pass->AddCopy(texture, buffer);
pass->EncodeCommands(impeller_context->GetResourceAllocator());
fml::AutoResetWaitableEvent latch;
sk_sp<SkData> sk_data;
auto completion = [buffer, &buffer_desc, &sk_data,
&latch](impeller::CommandBuffer::Status status) {
fml::ScopedCleanupClosure cleanup([&latch]() { latch.Signal(); });
if (status != impeller::CommandBuffer::Status::kCompleted) {
FML_LOG(ERROR) << "Failed to complete blit pass.";
return;
}
sk_data = SkData::MakeWithCopy(buffer->OnGetContents(), buffer_desc.size);
};
if (!impeller_context->GetCommandQueue()
->Submit({command_buffer}, completion)
.ok()) {
FML_LOG(ERROR) << "Failed to submit commands.";
}
latch.Wait();
return std::make_pair(
sk_data, ToScreenshotFormat(texture->GetTextureDescriptor().format));
}
#endif
std::pair<sk_sp<SkData>, Rasterizer::ScreenshotFormat>
Rasterizer::ScreenshotLayerTreeAsImage(
flutter::LayerTree* tree,
flutter::CompositorContext& compositor_context,
bool compressed) {
#if IMPELLER_SUPPORTS_RENDERING
if (delegate_.GetSettings().enable_impeller) {
return ScreenshotLayerTreeAsImageImpeller(GetAiksContext(), tree,
compositor_context, compressed);
}
#endif // IMPELLER_SUPPORTS_RENDERING
GrDirectContext* surface_context = GetGrContext();
// Attempt to create a snapshot surface depending on whether we have access
// to a valid GPU rendering context.
std::unique_ptr<OffscreenSurface> snapshot_surface =
std::make_unique<OffscreenSurface>(surface_context, tree->frame_size());
if (!snapshot_surface->IsValid()) {
FML_LOG(ERROR) << "Screenshot: unable to create snapshot surface";
return {nullptr, ScreenshotFormat::kUnknown};
}
// Draw the current layer tree into the snapshot surface.
DlCanvas* canvas = snapshot_surface->GetCanvas();
  // snapshot_surface->makeImageSnapshot needs the GL context to be set if the
  // render context is GL. frame->Raster() pops the GL context on platforms
  // where GL context switching is used (for example, older iOS versions that
  // use GL). We reset the GL context using the context switch.
auto context_switch = surface_->MakeRenderContextCurrent();
if (!context_switch->GetResult()) {
FML_LOG(ERROR) << "Screenshot: unable to make image screenshot";
return {nullptr, ScreenshotFormat::kUnknown};
}
RenderFrameForScreenshot(compositor_context, canvas, tree, surface_context,
nullptr);
return std::make_pair(snapshot_surface->GetRasterData(compressed),
ScreenshotFormat::kUnknown);
}
Rasterizer::Screenshot Rasterizer::ScreenshotLastLayerTree(
Rasterizer::ScreenshotType type,
bool base64_encode) {
if (delegate_.GetSettings().enable_impeller &&
type == ScreenshotType::SkiaPicture) {
FML_DCHECK(false);
FML_LOG(ERROR) << "Last layer tree cannot be screenshotted as a "
"SkiaPicture when using Impeller.";
return {};
}
// TODO(dkwingsmt): Support screenshotting all last layer trees
// when the shell protocol supports multi-views.
// https://github.com/flutter/flutter/issues/135534
// https://github.com/flutter/flutter/issues/135535
auto* layer_tree = GetLastLayerTree(kFlutterImplicitViewId);
if (layer_tree == nullptr) {
FML_LOG(ERROR) << "Last layer tree was null when screenshotting.";
return {};
}
std::pair<sk_sp<SkData>, ScreenshotFormat> data{nullptr,
ScreenshotFormat::kUnknown};
std::string format;
switch (type) {
case ScreenshotType::SkiaPicture:
format = "ScreenshotType::SkiaPicture";
data.first =
ScreenshotLayerTreeAsPicture(layer_tree, *compositor_context_);
break;
case ScreenshotType::UncompressedImage:
format = "ScreenshotType::UncompressedImage";
data =
ScreenshotLayerTreeAsImage(layer_tree, *compositor_context_, false);
break;
case ScreenshotType::CompressedImage:
format = "ScreenshotType::CompressedImage";
data = ScreenshotLayerTreeAsImage(layer_tree, *compositor_context_, true);
break;
case ScreenshotType::SurfaceData: {
Surface::SurfaceData surface_data = surface_->GetSurfaceData();
format = surface_data.pixel_format;
data.first = surface_data.data;
break;
}
}
if (data.first == nullptr) {
FML_LOG(ERROR) << "Screenshot data was null.";
return {};
}
if (base64_encode) {
size_t b64_size = Base64::EncodedSize(data.first->size());
auto b64_data = SkData::MakeUninitialized(b64_size);
Base64::Encode(data.first->data(), data.first->size(),
b64_data->writable_data());
return Rasterizer::Screenshot{b64_data, layer_tree->frame_size(), format,
data.second};
}
return Rasterizer::Screenshot{data.first, layer_tree->frame_size(), format,
data.second};
}
void Rasterizer::SetNextFrameCallback(const fml::closure& callback) {
next_frame_callback_ = callback;
}
void Rasterizer::SetExternalViewEmbedder(
const std::shared_ptr<ExternalViewEmbedder>& view_embedder) {
external_view_embedder_ = view_embedder;
}
void Rasterizer::SetSnapshotSurfaceProducer(
std::unique_ptr<SnapshotSurfaceProducer> producer) {
snapshot_surface_producer_ = std::move(producer);
}
fml::RefPtr<fml::RasterThreadMerger> Rasterizer::GetRasterThreadMerger() {
return raster_thread_merger_;
}
void Rasterizer::FireNextFrameCallbackIfPresent() {
if (!next_frame_callback_) {
return;
}
// It is safe for the callback to set a new callback.
auto callback = next_frame_callback_;
next_frame_callback_ = nullptr;
callback();
}
void Rasterizer::SetResourceCacheMaxBytes(size_t max_bytes, bool from_user) {
user_override_resource_cache_bytes_ |= from_user;
if (!from_user && user_override_resource_cache_bytes_) {
// We should not update the setting here if a user has explicitly set a
// value for this over the flutter/skia channel.
return;
}
max_cache_bytes_ = max_bytes;
if (!surface_) {
return;
}
GrDirectContext* context = surface_->GetContext();
if (context) {
auto context_switch = surface_->MakeRenderContextCurrent();
if (!context_switch->GetResult()) {
return;
}
context->setResourceCacheLimit(max_bytes);
}
}
std::optional<size_t> Rasterizer::GetResourceCacheMaxBytes() const {
if (!surface_) {
return std::nullopt;
}
GrDirectContext* context = surface_->GetContext();
if (context) {
return context->getResourceCacheLimit();
}
return std::nullopt;
}
Rasterizer::Screenshot::Screenshot() {}
Rasterizer::Screenshot::Screenshot(sk_sp<SkData> p_data,
SkISize p_size,
const std::string& p_format,
ScreenshotFormat p_pixel_format)
: data(std::move(p_data)),
frame_size(p_size),
format(p_format),
pixel_format(p_pixel_format) {}
Rasterizer::Screenshot::Screenshot(const Screenshot& other) = default;
Rasterizer::Screenshot::~Screenshot() = default;
} // namespace flutter
| engine/shell/common/rasterizer.cc/0 | {
"file_path": "engine/shell/common/rasterizer.cc",
"repo_id": "engine",
"token_count": 15961
} | 326 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/shell_io_manager.h"
#include "flutter/common/task_runners.h"
#include "flutter/fml/mapping.h"
#include "flutter/lib/ui/painting/multi_frame_codec.h"
#include "flutter/testing/dart_isolate_runner.h"
#include "flutter/testing/fixture_test.h"
#include "flutter/testing/post_task_sync.h"
#include "flutter/testing/test_gl_surface.h" // nogncheck
#include "flutter/testing/testing.h"
namespace flutter {
namespace testing {
class ShellIOManagerTest : public FixtureTest {};
// Regression test for https://github.com/flutter/engine/pull/32106.
TEST_F(ShellIOManagerTest,
ItDoesNotCrashThatSkiaUnrefQueueDrainAfterIOManagerReset) {
auto settings = CreateSettingsForFixture();
auto vm_ref = DartVMRef::Create(settings);
auto vm_data = vm_ref.GetVMData();
auto gif_mapping = flutter::testing::OpenFixtureAsSkData("hello_loop_2.gif");
ASSERT_TRUE(gif_mapping);
ImageGeneratorRegistry registry;
std::shared_ptr<ImageGenerator> gif_generator =
registry.CreateCompatibleGenerator(gif_mapping);
ASSERT_TRUE(gif_generator);
TaskRunners runners(GetCurrentTestName(), // label
CreateNewThread("platform"), // platform
CreateNewThread("raster"), // raster
CreateNewThread("ui"), // ui
CreateNewThread("io") // io
);
std::unique_ptr<TestGLSurface> gl_surface;
std::unique_ptr<ShellIOManager> io_manager;
fml::RefPtr<MultiFrameCodec> codec;
// Setup the IO manager.
PostTaskSync(runners.GetIOTaskRunner(), [&]() {
gl_surface = std::make_unique<TestGLSurface>(SkISize::Make(1, 1));
io_manager = std::make_unique<ShellIOManager>(
gl_surface->CreateGrContext(), std::make_shared<fml::SyncSwitch>(),
runners.GetIOTaskRunner(), nullptr,
fml::TimeDelta::FromMilliseconds(0));
});
auto isolate = RunDartCodeInIsolate(vm_ref, settings, runners, "emptyMain",
{}, GetDefaultKernelFilePath(),
io_manager->GetWeakIOManager());
PostTaskSync(runners.GetUITaskRunner(), [&]() {
fml::AutoResetWaitableEvent isolate_latch;
EXPECT_TRUE(isolate->RunInIsolateScope([&]() -> bool {
Dart_Handle library = Dart_RootLibrary();
if (Dart_IsError(library)) {
isolate_latch.Signal();
return false;
}
Dart_Handle closure =
Dart_GetField(library, Dart_NewStringFromCString("frameCallback"));
if (Dart_IsError(closure) || !Dart_IsClosure(closure)) {
isolate_latch.Signal();
return false;
}
codec = fml::MakeRefCounted<MultiFrameCodec>(std::move(gif_generator));
codec->getNextFrame(closure);
isolate_latch.Signal();
return true;
}));
isolate_latch.Wait();
});
// Destroy the IO manager
PostTaskSync(runners.GetIOTaskRunner(), [&]() {
    // 'SkiaUnrefQueue.Drain' will be called after 'io_manager.reset()' in this
    // test. If the resource context has been destroyed at that time, it will
    // crash.
//
// 'Drain()' currently checks whether the weak pointer is still valid or not
// before trying to call anything on it.
//
// However, calling 'unref' on the 'SkImage_Lazy' ends up freeing a
// 'GrBackendTexture'. That object seems to assume that something else is
// keeping the context alive. This seems like it might be a bad assumption
// on Skia's part, but in Skia's defense we're doing something pretty weird
// here by keeping GPU resident objects alive without keeping the
// 'GrDirectContext' alive ourselves.
//
// See https://github.com/flutter/flutter/issues/87895
io_manager.reset();
gl_surface.reset();
});
}
} // namespace testing
} // namespace flutter
| engine/shell/common/shell_io_manager_unittests.cc/0 | {
"file_path": "engine/shell/common/shell_io_manager_unittests.cc",
"repo_id": "engine",
"token_count": 1542
} | 327 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/vsync_waiter.h"
#include "flow/frame_timings.h"
#include "flutter/fml/task_runner.h"
#include "flutter/fml/trace_event.h"
#include "fml/logging.h"
#include "fml/message_loop_task_queues.h"
#include "fml/task_queue_id.h"
#include "fml/time/time_point.h"
namespace flutter {
static constexpr const char* kVsyncFlowName = "VsyncFlow";
static constexpr const char* kVsyncTraceName = "VsyncProcessCallback";
VsyncWaiter::VsyncWaiter(const TaskRunners& task_runners)
: task_runners_(task_runners) {}
VsyncWaiter::~VsyncWaiter() = default;
// Public method invoked by the animator.
void VsyncWaiter::AsyncWaitForVsync(const Callback& callback) {
if (!callback) {
return;
}
TRACE_EVENT0("flutter", "AsyncWaitForVsync");
{
std::scoped_lock lock(callback_mutex_);
if (callback_) {
// The animator may request a frame more than once within a frame
// interval. Multiple calls to request frame must result in a single
// callback per frame interval.
TRACE_EVENT_INSTANT0("flutter", "MultipleCallsToVsyncInFrameInterval");
return;
}
callback_ = callback;
if (!secondary_callbacks_.empty()) {
// Return directly as `AwaitVSync` is already called by
// `ScheduleSecondaryCallback`.
return;
}
}
AwaitVSync();
}
void VsyncWaiter::ScheduleSecondaryCallback(uintptr_t id,
const fml::closure& callback) {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (!callback) {
return;
}
TRACE_EVENT0("flutter", "ScheduleSecondaryCallback");
{
std::scoped_lock lock(callback_mutex_);
bool secondary_callbacks_originally_empty = secondary_callbacks_.empty();
auto [_, inserted] = secondary_callbacks_.emplace(id, callback);
if (!inserted) {
// Multiple schedules must result in a single callback per frame interval.
TRACE_EVENT_INSTANT0("flutter",
"MultipleCallsToSecondaryVsyncInFrameInterval");
return;
}
if (callback_) {
// Return directly as `AwaitVSync` is already called by
// `AsyncWaitForVsync`.
return;
}
if (!secondary_callbacks_originally_empty) {
// Return directly as `AwaitVSync` is already called by
// `ScheduleSecondaryCallback`.
return;
}
}
AwaitVSyncForSecondaryCallback();
}
void VsyncWaiter::FireCallback(fml::TimePoint frame_start_time,
fml::TimePoint frame_target_time,
bool pause_secondary_tasks) {
FML_DCHECK(fml::TimePoint::Now() >= frame_start_time);
Callback callback;
std::vector<fml::closure> secondary_callbacks;
{
std::scoped_lock lock(callback_mutex_);
callback = std::move(callback_);
for (auto& pair : secondary_callbacks_) {
secondary_callbacks.push_back(std::move(pair.second));
}
secondary_callbacks_.clear();
}
if (!callback && secondary_callbacks.empty()) {
// This means that the vsync waiter implementation fired a callback for a
// request we did not make. This is a paranoid check but we still want to
// make sure we catch misbehaving vsync implementations.
TRACE_EVENT_INSTANT0("flutter", "MismatchedFrameCallback");
return;
}
if (callback) {
const uint64_t flow_identifier = fml::tracing::TraceNonce();
if (pause_secondary_tasks) {
PauseDartEventLoopTasks();
}
    // The base trace ensures that flows have a root to begin from if one does
    // not exist. The trace viewer will ignore flows that have no base trace
    // event. While all our message loops insert a base trace event
    // (MessageLoop::RunExpiredTasks), embedders may not.
TRACE_EVENT0_WITH_FLOW_IDS("flutter", "VsyncFireCallback",
/*flow_id_count=*/1,
/*flow_ids=*/&flow_identifier);
TRACE_FLOW_BEGIN("flutter", kVsyncFlowName, flow_identifier);
fml::TaskQueueId ui_task_queue_id =
task_runners_.GetUITaskRunner()->GetTaskQueueId();
task_runners_.GetUITaskRunner()->PostTask(
[ui_task_queue_id, callback, flow_identifier, frame_start_time,
frame_target_time, pause_secondary_tasks]() {
FML_TRACE_EVENT_WITH_FLOW_IDS(
"flutter", kVsyncTraceName, /*flow_id_count=*/1,
/*flow_ids=*/&flow_identifier, "StartTime", frame_start_time,
"TargetTime", frame_target_time);
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder =
std::make_unique<FrameTimingsRecorder>();
frame_timings_recorder->RecordVsync(frame_start_time,
frame_target_time);
callback(std::move(frame_timings_recorder));
TRACE_FLOW_END("flutter", kVsyncFlowName, flow_identifier);
if (pause_secondary_tasks) {
ResumeDartEventLoopTasks(ui_task_queue_id);
}
});
}
for (auto& secondary_callback : secondary_callbacks) {
task_runners_.GetUITaskRunner()->PostTask(secondary_callback);
}
}
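// The UI task queue's secondary source holds Dart event loop tasks. Pausing it
// keeps those tasks from running while the frame callback is in flight;
// ResumeDartEventLoopTasks re-enables them once the callback has executed.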
void VsyncWaiter::PauseDartEventLoopTasks() {
auto ui_task_queue_id = task_runners_.GetUITaskRunner()->GetTaskQueueId();
auto task_queues = fml::MessageLoopTaskQueues::GetInstance();
task_queues->PauseSecondarySource(ui_task_queue_id);
}
void VsyncWaiter::ResumeDartEventLoopTasks(fml::TaskQueueId ui_task_queue_id) {
auto task_queues = fml::MessageLoopTaskQueues::GetInstance();
task_queues->ResumeSecondarySource(ui_task_queue_id);
}
} // namespace flutter
| engine/shell/common/vsync_waiter.cc/0 | {
"file_path": "engine/shell/common/vsync_waiter.cc",
"repo_id": "engine",
"token_count": 2308
} | 328 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/gpu/gpu_surface_metal_delegate.h"
namespace flutter {
GPUSurfaceMetalDelegate::GPUSurfaceMetalDelegate(
MTLRenderTargetType render_target_type)
: render_target_type_(render_target_type) {}
GPUSurfaceMetalDelegate::~GPUSurfaceMetalDelegate() = default;
MTLRenderTargetType GPUSurfaceMetalDelegate::GetRenderTargetType() {
return render_target_type_;
}
bool GPUSurfaceMetalDelegate::AllowsDrawingWhenGpuDisabled() const {
return true;
}
} // namespace flutter
| engine/shell/gpu/gpu_surface_metal_delegate.cc/0 | {
"file_path": "engine/shell/gpu/gpu_surface_metal_delegate.cc",
"repo_id": "engine",
"token_count": 214
} | 329 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_GPU_GPU_SURFACE_VULKAN_IMPELLER_H_
#define FLUTTER_SHELL_GPU_GPU_SURFACE_VULKAN_IMPELLER_H_
#include "flutter/common/graphics/gl_context_switch.h"
#include "flutter/flow/surface.h"
#include "flutter/fml/macros.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/impeller/aiks/aiks_context.h"
#include "flutter/impeller/renderer/context.h"
#include "flutter/shell/gpu/gpu_surface_vulkan_delegate.h"
namespace flutter {
class GPUSurfaceVulkanImpeller final : public Surface {
public:
explicit GPUSurfaceVulkanImpeller(std::shared_ptr<impeller::Context> context);
// |Surface|
~GPUSurfaceVulkanImpeller() override;
// |Surface|
bool IsValid() override;
private:
std::shared_ptr<impeller::Context> impeller_context_;
std::shared_ptr<impeller::Renderer> impeller_renderer_;
std::shared_ptr<impeller::AiksContext> aiks_context_;
bool is_valid_ = false;
// |Surface|
std::unique_ptr<SurfaceFrame> AcquireFrame(const SkISize& size) override;
// |Surface|
SkMatrix GetRootTransformation() const override;
// |Surface|
GrDirectContext* GetContext() override;
// |Surface|
std::unique_ptr<GLContextResult> MakeRenderContextCurrent() override;
// |Surface|
bool EnableRasterCache() const override;
// |Surface|
std::shared_ptr<impeller::AiksContext> GetAiksContext() const override;
FML_DISALLOW_COPY_AND_ASSIGN(GPUSurfaceVulkanImpeller);
};
} // namespace flutter
#endif // FLUTTER_SHELL_GPU_GPU_SURFACE_VULKAN_IMPELLER_H_
| engine/shell/gpu/gpu_surface_vulkan_impeller.h/0 | {
"file_path": "engine/shell/gpu/gpu_surface_vulkan_impeller.h",
"repo_id": "engine",
"token_count": 618
} | 330 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/android_egl_surface.h"
#include <EGL/eglext.h>
#include <sys/system_properties.h>
#include <array>
#include <list>
#include "flutter/fml/trace_event.h"
namespace flutter {
void LogLastEGLError() {
struct EGLNameErrorPair {
const char* name;
EGLint code;
};
#define _EGL_ERROR_DESC(a) \
{ #a, a }
const EGLNameErrorPair pairs[] = {
_EGL_ERROR_DESC(EGL_SUCCESS),
_EGL_ERROR_DESC(EGL_NOT_INITIALIZED),
_EGL_ERROR_DESC(EGL_BAD_ACCESS),
_EGL_ERROR_DESC(EGL_BAD_ALLOC),
_EGL_ERROR_DESC(EGL_BAD_ATTRIBUTE),
_EGL_ERROR_DESC(EGL_BAD_CONTEXT),
_EGL_ERROR_DESC(EGL_BAD_CONFIG),
_EGL_ERROR_DESC(EGL_BAD_CURRENT_SURFACE),
_EGL_ERROR_DESC(EGL_BAD_DISPLAY),
_EGL_ERROR_DESC(EGL_BAD_SURFACE),
_EGL_ERROR_DESC(EGL_BAD_MATCH),
_EGL_ERROR_DESC(EGL_BAD_PARAMETER),
_EGL_ERROR_DESC(EGL_BAD_NATIVE_PIXMAP),
_EGL_ERROR_DESC(EGL_BAD_NATIVE_WINDOW),
_EGL_ERROR_DESC(EGL_CONTEXT_LOST),
};
#undef _EGL_ERROR_DESC
const auto count = sizeof(pairs) / sizeof(EGLNameErrorPair);
EGLint last_error = eglGetError();
for (size_t i = 0; i < count; i++) {
if (last_error == pairs[i].code) {
FML_LOG(ERROR) << "EGL Error: " << pairs[i].name << " (" << pairs[i].code
<< ")";
return;
}
}
FML_LOG(ERROR) << "Unknown EGL Error";
}
class AndroidEGLSurfaceDamage {
public:
void init(EGLDisplay display, EGLContext context) {}
void SetDamageRegion(EGLDisplay display,
EGLSurface surface,
const std::optional<SkIRect>& region) {}
/// This was disabled after discussion in
/// https://github.com/flutter/flutter/issues/123353
bool SupportsPartialRepaint() const { return false; }
std::optional<SkIRect> InitialDamage(EGLDisplay display, EGLSurface surface) {
return std::nullopt;
}
bool SwapBuffersWithDamage(EGLDisplay display,
EGLSurface surface,
const std::optional<SkIRect>& damage) {
return eglSwapBuffers(display, surface);
}
};
AndroidEGLSurface::AndroidEGLSurface(EGLSurface surface,
EGLDisplay display,
EGLContext context)
: surface_(surface),
display_(display),
context_(context),
damage_(std::make_unique<AndroidEGLSurfaceDamage>()) {
damage_->init(display_, context);
}
AndroidEGLSurface::~AndroidEGLSurface() {
[[maybe_unused]] auto result = eglDestroySurface(display_, surface_);
FML_DCHECK(result == EGL_TRUE);
}
bool AndroidEGLSurface::IsValid() const {
return surface_ != EGL_NO_SURFACE;
}
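// A surface only counts as current if the thread's current EGL display,
// context, draw surface, and read surface all match the ones owned here.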
bool AndroidEGLSurface::IsContextCurrent() const {
EGLContext current_egl_context = eglGetCurrentContext();
if (context_ != current_egl_context) {
return false;
}
EGLDisplay current_egl_display = eglGetCurrentDisplay();
if (display_ != current_egl_display) {
return false;
}
EGLSurface draw_surface = eglGetCurrentSurface(EGL_DRAW);
if (draw_surface != surface_) {
return false;
}
EGLSurface read_surface = eglGetCurrentSurface(EGL_READ);
if (read_surface != surface_) {
return false;
}
return true;
}
AndroidEGLSurfaceMakeCurrentStatus AndroidEGLSurface::MakeCurrent() const {
if (IsContextCurrent()) {
return AndroidEGLSurfaceMakeCurrentStatus::kSuccessAlreadyCurrent;
}
if (eglMakeCurrent(display_, surface_, surface_, context_) != EGL_TRUE) {
FML_LOG(ERROR) << "Could not make the context current";
LogLastEGLError();
return AndroidEGLSurfaceMakeCurrentStatus::kFailure;
}
return AndroidEGLSurfaceMakeCurrentStatus::kSuccessMadeCurrent;
}
void AndroidEGLSurface::SetDamageRegion(
const std::optional<SkIRect>& buffer_damage) {
damage_->SetDamageRegion(display_, surface_, buffer_damage);
}
bool AndroidEGLSurface::SetPresentationTime(
const fml::TimePoint& presentation_time) {
if (presentation_time_proc_) {
const auto time_ns = presentation_time.ToEpochDelta().ToNanoseconds();
return presentation_time_proc_(display_, surface_, time_ns);
} else {
return false;
}
}
bool AndroidEGLSurface::SwapBuffers(
const std::optional<SkIRect>& surface_damage) {
TRACE_EVENT0("flutter", "AndroidContextGL::SwapBuffers");
return damage_->SwapBuffersWithDamage(display_, surface_, surface_damage);
}
bool AndroidEGLSurface::SupportsPartialRepaint() const {
return damage_->SupportsPartialRepaint();
}
std::optional<SkIRect> AndroidEGLSurface::InitialDamage() {
return damage_->InitialDamage(display_, surface_);
}
SkISize AndroidEGLSurface::GetSize() const {
EGLint width = 0;
EGLint height = 0;
if (!eglQuerySurface(display_, surface_, EGL_WIDTH, &width) ||
!eglQuerySurface(display_, surface_, EGL_HEIGHT, &height)) {
FML_LOG(ERROR) << "Unable to query EGL surface size";
LogLastEGLError();
return SkISize::Make(0, 0);
}
return SkISize::Make(width, height);
}
} // namespace flutter
| engine/shell/platform/android/android_egl_surface.cc/0 | {
"file_path": "engine/shell/platform/android/android_egl_surface.cc",
"repo_id": "engine",
"token_count": 2136
} | 331 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/android_surface_vulkan_impeller.h"
#include <memory>
#include <utility>
#include "flutter/fml/concurrent_message_loop.h"
#include "flutter/fml/logging.h"
#include "flutter/fml/memory/ref_ptr.h"
#include "flutter/impeller/renderer/backend/vulkan/context_vk.h"
#include "flutter/shell/gpu/gpu_surface_vulkan_impeller.h"
#include "flutter/vulkan/vulkan_native_surface_android.h"
namespace flutter {
AndroidSurfaceVulkanImpeller::AndroidSurfaceVulkanImpeller(
const std::shared_ptr<AndroidContextVulkanImpeller>& android_context) {
is_valid_ = android_context->IsValid();
auto& context_vk =
impeller::ContextVK::Cast(*android_context->GetImpellerContext());
surface_context_vk_ = context_vk.CreateSurfaceContext();
}
AndroidSurfaceVulkanImpeller::~AndroidSurfaceVulkanImpeller() = default;
bool AndroidSurfaceVulkanImpeller::IsValid() const {
return is_valid_;
}
void AndroidSurfaceVulkanImpeller::TeardownOnScreenContext() {
// Nothing to do.
}
std::unique_ptr<Surface> AndroidSurfaceVulkanImpeller::CreateGPUSurface(
GrDirectContext* gr_context) {
if (!IsValid()) {
return nullptr;
}
if (!native_window_ || !native_window_->IsValid()) {
return nullptr;
}
std::unique_ptr<GPUSurfaceVulkanImpeller> gpu_surface =
std::make_unique<GPUSurfaceVulkanImpeller>(surface_context_vk_);
if (!gpu_surface->IsValid()) {
return nullptr;
}
return gpu_surface;
}
bool AndroidSurfaceVulkanImpeller::OnScreenSurfaceResize(const SkISize& size) {
surface_context_vk_->UpdateSurfaceSize(
impeller::ISize{size.width(), size.height()});
return true;
}
bool AndroidSurfaceVulkanImpeller::ResourceContextMakeCurrent() {
return true;
}
bool AndroidSurfaceVulkanImpeller::ResourceContextClearCurrent() {
return true;
}
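// Creates a Vulkan surface for the incoming native window and hands it,
// together with the window size, to the surface context. Returns false if the
// window is invalid or the Vulkan surface cannot be created.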
bool AndroidSurfaceVulkanImpeller::SetNativeWindow(
fml::RefPtr<AndroidNativeWindow> window) {
native_window_ = std::move(window);
bool success = native_window_ && native_window_->IsValid();
if (success) {
auto surface =
surface_context_vk_->CreateAndroidSurface(native_window_->handle());
if (!surface) {
FML_LOG(ERROR) << "Could not create a vulkan surface.";
return false;
}
auto size = native_window_->GetSize();
return surface_context_vk_->SetWindowSurface(
std::move(surface), impeller::ISize{size.width(), size.height()});
}
native_window_ = nullptr;
return false;
}
std::shared_ptr<impeller::Context>
AndroidSurfaceVulkanImpeller::GetImpellerContext() {
return surface_context_vk_;
}
} // namespace flutter
| engine/shell/platform/android/android_surface_vulkan_impeller.cc/0 | {
"file_path": "engine/shell/platform/android/android_surface_vulkan_impeller.cc",
"repo_id": "engine",
"token_count": 985
} | 332 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define FML_USED_ON_EMBEDDER
#include <android/log.h>
#include <optional>
#include <vector>
#include "common/settings.h"
#include "flutter/fml/command_line.h"
#include "flutter/fml/file.h"
#include "flutter/fml/logging.h"
#include "flutter/fml/macros.h"
#include "flutter/fml/message_loop.h"
#include "flutter/fml/native_library.h"
#include "flutter/fml/paths.h"
#include "flutter/fml/platform/android/jni_util.h"
#include "flutter/fml/platform/android/paths_android.h"
#include "flutter/fml/size.h"
#include "flutter/lib/ui/plugins/callback_cache.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/shell/common/shell.h"
#include "flutter/shell/common/switches.h"
#include "flutter/shell/platform/android/android_context_vulkan_impeller.h"
#include "flutter/shell/platform/android/flutter_main.h"
#include "impeller/base/validation.h"
#include "impeller/toolkit/android/proc_table.h"
#include "third_party/dart/runtime/include/dart_tools_api.h"
#include "txt/platform.h"
namespace flutter {
constexpr int kMinimumAndroidApiLevelForVulkan = 29;
extern "C" {
#if FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG
// Used for debugging dart:* sources.
extern const uint8_t kPlatformStrongDill[];
extern const intptr_t kPlatformStrongDillSize;
#endif
}
namespace {
fml::jni::ScopedJavaGlobalRef<jclass>* g_flutter_jni_class = nullptr;
} // anonymous namespace
FlutterMain::FlutterMain(const flutter::Settings& settings)
: settings_(settings) {}
FlutterMain::~FlutterMain() = default;
static std::unique_ptr<FlutterMain> g_flutter_main;
FlutterMain& FlutterMain::Get() {
FML_CHECK(g_flutter_main) << "ensureInitializationComplete must have already "
"been called.";
return *g_flutter_main;
}
const flutter::Settings& FlutterMain::GetSettings() const {
return settings_;
}
void FlutterMain::Init(JNIEnv* env,
jclass clazz,
jobject context,
jobjectArray jargs,
jstring kernelPath,
jstring appStoragePath,
jstring engineCachesPath,
jlong initTimeMillis) {
std::vector<std::string> args;
args.push_back("flutter");
for (auto& arg : fml::jni::StringArrayToVector(env, jargs)) {
args.push_back(std::move(arg));
}
auto command_line = fml::CommandLineFromIterators(args.begin(), args.end());
auto settings = SettingsFromCommandLine(command_line);
  // Turn systracing on if ATrace_isEnabled is true and the user did not
  // already request systracing.
if (!settings.trace_systrace) {
settings.trace_systrace =
impeller::android::GetProcTable().TraceIsEnabled();
if (settings.trace_systrace) {
__android_log_print(
ANDROID_LOG_INFO, "Flutter",
"ATrace was enabled at startup. Flutter and Dart "
"tracing will be forwarded to systrace and will not show up in "
"Dart DevTools.");
}
}
settings.android_rendering_api = SelectedRenderingAPI(settings);
switch (settings.android_rendering_api) {
case AndroidRenderingAPI::kSoftware:
case AndroidRenderingAPI::kSkiaOpenGLES:
settings.enable_impeller = false;
break;
case AndroidRenderingAPI::kImpellerOpenGLES:
case AndroidRenderingAPI::kImpellerVulkan:
settings.enable_impeller = true;
break;
}
#if FLUTTER_RELEASE
// On most platforms the timeline is always disabled in release mode.
// On Android, enable it in release mode only when using systrace.
settings.enable_timeline_event_handler = settings.trace_systrace;
#endif // FLUTTER_RELEASE
// Restore the callback cache.
// TODO(chinmaygarde): Route all cache file access through FML and remove this
// setter.
flutter::DartCallbackCache::SetCachePath(
fml::jni::JavaStringToString(env, appStoragePath));
fml::paths::InitializeAndroidCachesPath(
fml::jni::JavaStringToString(env, engineCachesPath));
flutter::DartCallbackCache::LoadCacheFromDisk();
if (!flutter::DartVM::IsRunningPrecompiledCode() && kernelPath) {
// Check to see if the appropriate kernel files are present and configure
// settings accordingly.
auto application_kernel_path =
fml::jni::JavaStringToString(env, kernelPath);
if (fml::IsFile(application_kernel_path)) {
settings.application_kernel_asset = application_kernel_path;
}
}
settings.task_observer_add = [](intptr_t key, const fml::closure& callback) {
fml::MessageLoop::GetCurrent().AddTaskObserver(key, callback);
};
settings.task_observer_remove = [](intptr_t key) {
fml::MessageLoop::GetCurrent().RemoveTaskObserver(key);
};
settings.log_message_callback = [](const std::string& tag,
const std::string& message) {
__android_log_print(ANDROID_LOG_INFO, tag.c_str(), "%.*s",
static_cast<int>(message.size()), message.c_str());
};
#if FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG
// There are no ownership concerns here as all mappings are owned by the
// embedder and not the engine.
auto make_mapping_callback = [](const uint8_t* mapping, size_t size) {
return [mapping, size]() {
return std::make_unique<fml::NonOwnedMapping>(mapping, size);
};
};
settings.dart_library_sources_kernel =
make_mapping_callback(kPlatformStrongDill, kPlatformStrongDillSize);
#endif // FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG
// Not thread safe. Will be removed when FlutterMain is refactored to no
// longer be a singleton.
g_flutter_main.reset(new FlutterMain(settings));
g_flutter_main->SetupDartVMServiceUriCallback(env);
}
void FlutterMain::SetupDartVMServiceUriCallback(JNIEnv* env) {
g_flutter_jni_class = new fml::jni::ScopedJavaGlobalRef<jclass>(
env, env->FindClass("io/flutter/embedding/engine/FlutterJNI"));
if (g_flutter_jni_class->is_null()) {
return;
}
jfieldID uri_field = env->GetStaticFieldID(
g_flutter_jni_class->obj(), "vmServiceUri", "Ljava/lang/String;");
if (uri_field == nullptr) {
return;
}
auto set_uri = [env, uri_field](const std::string& uri) {
fml::jni::ScopedJavaLocalRef<jstring> java_uri =
fml::jni::StringToJavaString(env, uri);
env->SetStaticObjectField(g_flutter_jni_class->obj(), uri_field,
java_uri.obj());
};
fml::MessageLoop::EnsureInitializedForCurrentThread();
fml::RefPtr<fml::TaskRunner> platform_runner =
fml::MessageLoop::GetCurrent().GetTaskRunner();
vm_service_uri_callback_ = DartServiceIsolate::AddServerStatusCallback(
[platform_runner, set_uri](const std::string& uri) {
platform_runner->PostTask([uri, set_uri] { set_uri(uri); });
});
}
static void PrefetchDefaultFontManager(JNIEnv* env, jclass jcaller) {
// Initialize a singleton owned by Skia.
txt::GetDefaultFontManager();
}
bool FlutterMain::Register(JNIEnv* env) {
static const JNINativeMethod methods[] = {
{
.name = "nativeInit",
.signature = "(Landroid/content/Context;[Ljava/lang/String;Ljava/"
"lang/String;Ljava/lang/String;Ljava/lang/String;J)V",
.fnPtr = reinterpret_cast<void*>(&Init),
},
{
.name = "nativePrefetchDefaultFontManager",
.signature = "()V",
.fnPtr = reinterpret_cast<void*>(&PrefetchDefaultFontManager),
},
};
jclass clazz = env->FindClass("io/flutter/embedding/engine/FlutterJNI");
if (clazz == nullptr) {
return false;
}
return env->RegisterNatives(clazz, methods, fml::size(methods)) == 0;
}
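// Summary of the backend-selection policy implemented below (descriptive only;
// the function body remains the authoritative logic):
//   1. Software rendering when explicitly requested (incompatible with Impeller).
//   2. In debug/profile builds, an explicitly requested Impeller backend
//      ("opengles" or "vulkan").
//   3. With Impeller enabled, Vulkan on API level 29+ when a valid Vulkan
//      context can be created; otherwise the Skia OpenGLES fallback.
//   4. Skia OpenGLES when Impeller is disabled.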
// static
AndroidRenderingAPI FlutterMain::SelectedRenderingAPI(
const flutter::Settings& settings) {
if (settings.enable_software_rendering) {
FML_CHECK(!settings.enable_impeller)
<< "Impeller does not support software rendering. Either disable "
"software rendering or disable impeller.";
return AndroidRenderingAPI::kSoftware;
}
constexpr AndroidRenderingAPI kVulkanUnsupportedFallback =
AndroidRenderingAPI::kSkiaOpenGLES;
// Debug/Profile only functionality for testing a specific
// backend configuration.
#ifndef FLUTTER_RELEASE
  if (settings.requested_rendering_backend == "opengles" &&
settings.enable_impeller) {
return AndroidRenderingAPI::kImpellerOpenGLES;
}
if (settings.requested_rendering_backend == "vulkan" &&
settings.enable_impeller) {
return AndroidRenderingAPI::kImpellerVulkan;
}
#endif
if (settings.enable_impeller) {
// Vulkan must only be used on API level 29+, as older API levels do not
// have requisite features to support platform views.
//
// Even if this check returns true, Impeller may determine it cannot use
// Vulkan for some other reason, such as a missing required extension or
// feature.
int api_level = android_get_device_api_level();
if (api_level < kMinimumAndroidApiLevelForVulkan) {
return kVulkanUnsupportedFallback;
}
// Determine if Vulkan is supported by creating a Vulkan context and
// checking if it is valid.
impeller::ScopedValidationDisable disable_validation;
auto vulkan_backend = std::make_unique<AndroidContextVulkanImpeller>(
/*enable_vulkan_validation=*/false,
/*enable_vulkan_gpu_tracing=*/false,
/*quiet=*/true);
if (!vulkan_backend->IsValid()) {
return kVulkanUnsupportedFallback;
}
return AndroidRenderingAPI::kImpellerVulkan;
}
return AndroidRenderingAPI::kSkiaOpenGLES;
}
} // namespace flutter
| engine/shell/platform/android/flutter_main.cc/0 | {
"file_path": "engine/shell/platform/android/flutter_main.cc",
"repo_id": "engine",
"token_count": 3745
} | 333 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
 * Port of {@link android.util.Log} that only logs in {@link io.flutter.BuildConfig#DEBUG} mode and
* internally filters logs based on a {@link #logLevel}.
*/
public class Log {
private static int logLevel = android.util.Log.DEBUG;
public static int ASSERT = android.util.Log.ASSERT;
public static int DEBUG = android.util.Log.DEBUG;
public static int ERROR = android.util.Log.ERROR;
public static int INFO = android.util.Log.INFO;
public static int VERBOSE = android.util.Log.VERBOSE;
public static int WARN = android.util.Log.WARN;
/**
* Sets a log cutoff such that a log level of lower priority than {@code logLevel} is filtered
* out.
*
* <p>See {@link android.util.Log} for log level constants.
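   *
   * <p>For example (the chosen level is illustrative; any {@link android.util.Log} level works):
   *
   * <pre>{@code
   * // Filter out verbose, debug, and info messages from the Flutter embedding.
   * io.flutter.Log.setLogLevel(android.util.Log.WARN);
   * }</pre>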
*/
public static void setLogLevel(int logLevel) {
Log.logLevel = logLevel;
}
public static void println(@NonNull int level, @NonNull String tag, @NonNull String message) {
if (BuildConfig.DEBUG && logLevel <= level) {
android.util.Log.println(level, tag, message);
}
}
public static void v(@NonNull String tag, @NonNull String message) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.VERBOSE) {
android.util.Log.v(tag, message);
}
}
public static void v(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.VERBOSE) {
android.util.Log.v(tag, message, tr);
}
}
public static void i(@NonNull String tag, @NonNull String message) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.INFO) {
android.util.Log.i(tag, message);
}
}
public static void i(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.INFO) {
android.util.Log.i(tag, message, tr);
}
}
public static void d(@NonNull String tag, @NonNull String message) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.DEBUG) {
android.util.Log.d(tag, message);
}
}
public static void d(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
if (BuildConfig.DEBUG && logLevel <= android.util.Log.DEBUG) {
android.util.Log.d(tag, message, tr);
}
}
public static void w(@NonNull String tag, @NonNull String message) {
android.util.Log.w(tag, message);
}
public static void w(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
android.util.Log.w(tag, message, tr);
}
public static void e(@NonNull String tag, @NonNull String message) {
android.util.Log.e(tag, message);
}
public static void e(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
android.util.Log.e(tag, message, tr);
}
public static void wtf(@NonNull String tag, @NonNull String message) {
android.util.Log.wtf(tag, message);
}
public static void wtf(@NonNull String tag, @NonNull String message, @NonNull Throwable tr) {
android.util.Log.wtf(tag, message, tr);
}
@NonNull
public static String getStackTraceString(@Nullable Throwable tr) {
return android.util.Log.getStackTraceString(tr);
}
}
| engine/shell/platform/android/io/flutter/Log.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/Log.java",
"repo_id": "engine",
"token_count": 1163
} | 334 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.android;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DART_ENTRYPOINT_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DART_ENTRYPOINT_URI_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_BACKGROUND_MODE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_DART_ENTRYPOINT;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_INITIAL_ROUTE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_BACKGROUND_MODE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_CACHED_ENGINE_GROUP_ID;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_CACHED_ENGINE_ID;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DART_ENTRYPOINT;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DART_ENTRYPOINT_ARGS;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DESTROY_ENGINE_WITH_ACTIVITY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_INITIAL_ROUTE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.HANDLE_DEEPLINKING_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.INITIAL_ROUTE_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.NORMAL_THEME_META_DATA_KEY;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.fragment.app.FragmentActivity;
import androidx.fragment.app.FragmentManager;
import io.flutter.Log;
import io.flutter.embedding.android.FlutterActivityLaunchConfigs.BackgroundMode;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterShellArgs;
import io.flutter.embedding.engine.plugins.util.GeneratedPluginRegister;
import io.flutter.plugin.platform.PlatformPlugin;
import java.util.ArrayList;
import java.util.List;
/**
* A Flutter {@code Activity} that is based upon {@link FragmentActivity}.
*
* <p>{@code FlutterFragmentActivity} exists because there are some Android APIs in the ecosystem
* that only accept a {@link FragmentActivity}. If a {@link FragmentActivity} is not required, you
* should consider using a regular {@link FlutterActivity} instead, because {@link FlutterActivity}
* is considered to be the standard, canonical implementation of a Flutter {@code Activity}.
*/
// A number of methods in this class have the same implementation as FlutterActivity. These methods
// are duplicated for readability purposes. Be sure to replicate any change in this class in
// FlutterActivity, too.
public class FlutterFragmentActivity extends FragmentActivity
implements FlutterEngineProvider, FlutterEngineConfigurator {
private static final String TAG = "FlutterFragmentActivity";
// FlutterFragment management.
private static final String TAG_FLUTTER_FRAGMENT = "flutter_fragment";
// TODO(mattcarroll): replace ID with R.id when build system supports R.java
public static final int FRAGMENT_CONTAINER_ID = View.generateViewId();
/**
* Creates an {@link Intent} that launches a {@code FlutterFragmentActivity}, which executes a
* {@code main()} Dart entrypoint, and displays the "/" route as Flutter's initial route.
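   *
   * <p>For example (a sketch; {@code context} is whatever launching context is available):
   *
   * <pre>{@code
   * context.startActivity(FlutterFragmentActivity.createDefaultIntent(context));
   * }</pre>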
*/
@NonNull
public static Intent createDefaultIntent(@NonNull Context launchContext) {
return withNewEngine().build(launchContext);
}
/**
* Creates an {@link FlutterFragmentActivity.NewEngineIntentBuilder}, which can be used to
* configure an {@link Intent} to launch a {@code FlutterFragmentActivity} that internally creates
* a new {@link io.flutter.embedding.engine.FlutterEngine} using the desired Dart entrypoint,
* initial route, etc.
*/
@NonNull
public static NewEngineIntentBuilder withNewEngine() {
return new NewEngineIntentBuilder(FlutterFragmentActivity.class);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterFragmentActivity} with a new
* {@link io.flutter.embedding.engine.FlutterEngine} and the desired configuration.
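   *
   * <p>A minimal, hypothetical launch might look like this (the route value and {@code context}
   * are arbitrary examples):
   *
   * <pre>{@code
   * Intent intent =
   *     FlutterFragmentActivity.withNewEngine()
   *         .initialRoute("/my_route")
   *         .backgroundMode(BackgroundMode.opaque)
   *         .build(context);
   * context.startActivity(intent);
   * }</pre>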
*/
public static class NewEngineIntentBuilder {
private final Class<? extends FlutterFragmentActivity> activityClass;
private String initialRoute = DEFAULT_INITIAL_ROUTE;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
@Nullable private List<String> dartEntrypointArgs;
/**
* Constructor that allows this {@code NewEngineIntentBuilder} to be used by subclasses of
* {@code FlutterFragmentActivity}.
*
* <p>Subclasses of {@code FlutterFragmentActivity} should provide their own static version of
* {@link #withNewEngine()}, which returns an instance of {@code NewEngineIntentBuilder}
* constructed with a {@code Class} reference to the {@code FlutterFragmentActivity} subclass,
* e.g.:
*
* <p>{@code return new NewEngineIntentBuilder(MyFlutterActivity.class); }
*/
public NewEngineIntentBuilder(@NonNull Class<? extends FlutterFragmentActivity> activityClass) {
this.activityClass = activityClass;
}
/**
* The initial route that a Flutter app will render in this {@code FlutterFragmentActivity},
* defaults to "/".
*/
@NonNull
public NewEngineIntentBuilder initialRoute(@NonNull String initialRoute) {
this.initialRoute = initialRoute;
return this;
}
/**
* The mode of {@code FlutterFragmentActivity}'s background, either {@link
* BackgroundMode#opaque} or {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterFragmentActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterFragmentActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*/
@NonNull
public NewEngineIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* The Dart entrypoint arguments will be passed as a list of string to Dart's entrypoint
* function.
*
* <p>A value of null means do not pass any arguments to Dart's entrypoint function.
*
* @param dartEntrypointArgs The Dart entrypoint arguments.
* @return The engine intent builder.
*/
@NonNull
public NewEngineIntentBuilder dartEntrypointArgs(@Nullable List<String> dartEntrypointArgs) {
this.dartEntrypointArgs = dartEntrypointArgs;
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterFragmentActivity} with
* the desired configuration.
*/
@NonNull
public Intent build(@NonNull Context context) {
Intent intent =
new Intent(context, activityClass)
.putExtra(EXTRA_INITIAL_ROUTE, initialRoute)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, true);
if (dartEntrypointArgs != null) {
        intent.putExtra(EXTRA_DART_ENTRYPOINT_ARGS, new ArrayList<>(dartEntrypointArgs));
}
return intent;
}
}
/**
* Creates a {@link CachedEngineIntentBuilder}, which can be used to configure an {@link Intent}
* to launch a {@code FlutterFragmentActivity} that internally uses an existing {@link
* FlutterEngine} that is cached in {@link io.flutter.embedding.engine.FlutterEngineCache}.
*/
@NonNull
public static CachedEngineIntentBuilder withCachedEngine(@NonNull String cachedEngineId) {
return new CachedEngineIntentBuilder(FlutterFragmentActivity.class, cachedEngineId);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterFragmentActivity} with an
* existing {@link io.flutter.embedding.engine.FlutterEngine} that is cached in {@link
* io.flutter.embedding.engine.FlutterEngineCache}.
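   *
   * <p>A hypothetical launch with a pre-warmed engine might look like this (the engine ID is an
   * arbitrary example and must already be present in {@code FlutterEngineCache}):
   *
   * <pre>{@code
   * Intent intent =
   *     FlutterFragmentActivity.withCachedEngine("my_engine_id")
   *         .destroyEngineWithActivity(false)
   *         .build(context);
   * context.startActivity(intent);
   * }</pre>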
*/
public static class CachedEngineIntentBuilder {
private final Class<? extends FlutterFragmentActivity> activityClass;
private final String cachedEngineId;
private boolean destroyEngineWithActivity = false;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
/**
* Constructor that allows this {@code CachedEngineIntentBuilder} to be used by subclasses of
* {@code FlutterFragmentActivity}.
*
* <p>Subclasses of {@code FlutterFragmentActivity} should provide their own static version of
* {@link #withCachedEngine(String)}, which returns an instance of {@code
* CachedEngineIntentBuilder} constructed with a {@code Class} reference to the {@code
* FlutterFragmentActivity} subclass, e.g.:
*
* <p>{@code return new CachedEngineIntentBuilder(MyFlutterActivity.class, engineId); }
*/
public CachedEngineIntentBuilder(
@NonNull Class<? extends FlutterFragmentActivity> activityClass, @NonNull String engineId) {
this.activityClass = activityClass;
this.cachedEngineId = engineId;
}
/**
     * Whether the cached {@link io.flutter.embedding.engine.FlutterEngine} should be destroyed
     * and removed from the cache when this {@code FlutterFragmentActivity} is destroyed.
*
* <p>The default value is {@code false}.
*/
public CachedEngineIntentBuilder destroyEngineWithActivity(boolean destroyEngineWithActivity) {
this.destroyEngineWithActivity = destroyEngineWithActivity;
return this;
}
/**
* The mode of {@code FlutterFragmentActivity}'s background, either {@link
* BackgroundMode#opaque} or {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterFragmentActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterFragmentActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*/
@NonNull
public CachedEngineIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterFragmentActivity} with
* the desired configuration.
*/
@NonNull
public Intent build(@NonNull Context context) {
return new Intent(context, activityClass)
.putExtra(EXTRA_CACHED_ENGINE_ID, cachedEngineId)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, destroyEngineWithActivity)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode);
}
}
/**
* Creates a {@link NewEngineInGroupIntentBuilder}, which can be used to configure an {@link
* Intent} to launch a {@code FlutterFragmentActivity} that internally uses an existing {@link
* io.flutter.embedding.engine.FlutterEngineGroup} that is cached in {@link
* io.flutter.embedding.engine.FlutterEngineGroupCache}.
*
* @param engineGroupId A cached engine group ID.
* @return The builder.
*/
public static NewEngineInGroupIntentBuilder withNewEngineInGroup(@NonNull String engineGroupId) {
return new NewEngineInGroupIntentBuilder(FlutterFragmentActivity.class, engineGroupId);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterFragmentActivity} with a new
* {@link FlutterEngine} by FlutterEngineGroup#createAndRunEngine.
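   *
   * <p>A hypothetical launch might look like this (the engine group ID is an arbitrary example
   * and must already be present in {@code FlutterEngineGroupCache}):
   *
   * <pre>{@code
   * Intent intent =
   *     FlutterFragmentActivity.withNewEngineInGroup("my_engine_group_id")
   *         .dartEntrypoint("main")
   *         .build(context);
   * context.startActivity(intent);
   * }</pre>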
*/
public static class NewEngineInGroupIntentBuilder {
private final Class<? extends FlutterFragmentActivity> activityClass;
private final String cachedEngineGroupId;
private String dartEntrypoint = DEFAULT_DART_ENTRYPOINT;
private String initialRoute = DEFAULT_INITIAL_ROUTE;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
/**
* Constructor that allows this {@code NewEngineInGroupIntentBuilder} to be used by subclasses
* of {@code FlutterActivity}.
*
* <p>Subclasses of {@code FlutterFragmentActivity} should provide their own static version of
* {@link #withNewEngineInGroup}, which returns an instance of {@code
* NewEngineInGroupIntentBuilder} constructed with a {@code Class} reference to the {@code
* FlutterFragmentActivity} subclass, e.g.:
*
* <p>{@code return new NewEngineInGroupIntentBuilder(FlutterFragmentActivity.class,
     * cachedEngineGroupId); }
*
* @param activityClass A subclass of {@code FlutterFragmentActivity}.
* @param engineGroupId The engine group id.
*/
public NewEngineInGroupIntentBuilder(
@NonNull Class<? extends FlutterFragmentActivity> activityClass,
@NonNull String engineGroupId) {
this.activityClass = activityClass;
this.cachedEngineGroupId = engineGroupId;
}
/**
* The Dart entrypoint that will be executed as soon as the Dart snapshot is loaded, default to
* "main".
*
* @param dartEntrypoint The dart entrypoint's name
* @return The engine group intent builder
*/
@NonNull
public NewEngineInGroupIntentBuilder dartEntrypoint(@NonNull String dartEntrypoint) {
this.dartEntrypoint = dartEntrypoint;
return this;
}
/**
* The initial route that a Flutter app will render in this {@code FlutterFragmentActivity},
* defaults to "/".
*/
@NonNull
public NewEngineInGroupIntentBuilder initialRoute(@NonNull String initialRoute) {
this.initialRoute = initialRoute;
return this;
}
/**
* The mode of {@code FlutterFragmentActivity}'s background, either {@link
* BackgroundMode#opaque} or {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterFragmentActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterFragmentActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*/
@NonNull
public NewEngineInGroupIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterFragmentActivity} with
* the desired configuration.
*/
@NonNull
public Intent build(@NonNull Context context) {
return new Intent(context, activityClass)
.putExtra(EXTRA_DART_ENTRYPOINT, dartEntrypoint)
.putExtra(EXTRA_INITIAL_ROUTE, initialRoute)
.putExtra(EXTRA_CACHED_ENGINE_GROUP_ID, cachedEngineGroupId)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, true);
}
}
@Nullable private FlutterFragment flutterFragment;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
switchLaunchThemeForNormalTheme();
    // Get an existing fragment reference first, before super.onCreate(), because super.onCreate()
    // re-attaches any existing fragments. A re-attached FlutterFragment references this host
    // activity, so the activity must already be aware of its child fragment at that point.
flutterFragment = retrieveExistingFlutterFragmentIfPossible();
super.onCreate(savedInstanceState);
configureWindowForTransparency();
setContentView(createFragmentContainer());
configureStatusBarForFullscreenFlutterExperience();
ensureFlutterFragmentCreated();
}
/**
* Switches themes for this {@code Activity} from the theme used to launch this {@code Activity}
* to a "normal theme" that is intended for regular {@code Activity} operation.
*
* <p>This behavior is offered so that a "launch screen" can be displayed while the application
* initially loads. To utilize this behavior in an app, do the following:
*
* <ol>
* <li>Create 2 different themes in style.xml: one theme for the launch screen and one theme for
* normal display.
* <li>In the launch screen theme, set the "windowBackground" property to a {@code Drawable} of
* your choice.
* <li>In the normal theme, customize however you'd like.
* <li>In the AndroidManifest.xml, set the theme of your {@code FlutterFragmentActivity} to your
* launch theme.
* <li>Add a {@code <meta-data>} property to your {@code FlutterFragmentActivity} with a name of
* "io.flutter.embedding.android.NormalTheme" and set the resource to your normal theme,
   *       e.g., {@code android:resource="@style/MyNormalTheme"}.
* </ol>
*
* With the above settings, your launch theme will be used when loading the app, and then the
* theme will be switched to your normal theme once the app has initialized.
*
* <p>Do not change aspects of system chrome between a launch theme and normal theme. Either
* define both themes to be fullscreen or not, and define both themes to display the same status
* bar and navigation bar settings. If you wish to adjust system chrome once your Flutter app
* renders, use platform channels to instruct Android to do so at the appropriate time. This will
* avoid any jarring visual changes during app startup.
*/
private void switchLaunchThemeForNormalTheme() {
try {
Bundle metaData = getMetaData();
if (metaData != null) {
int normalThemeRID = metaData.getInt(NORMAL_THEME_META_DATA_KEY, -1);
if (normalThemeRID != -1) {
setTheme(normalThemeRID);
}
} else {
Log.v(TAG, "Using the launch theme as normal theme.");
}
} catch (PackageManager.NameNotFoundException exception) {
Log.e(
TAG,
"Could not read meta-data for FlutterFragmentActivity. Using the launch theme as normal theme.");
}
}
/**
* Sets this {@code Activity}'s {@code Window} background to be transparent, and hides the status
* bar, if this {@code Activity}'s desired {@link BackgroundMode} is {@link
* BackgroundMode#transparent}.
*
* <p>For {@code Activity} transparency to work as expected, the theme applied to this {@code
* Activity} must include {@code <item name="android:windowIsTranslucent">true</item>}.
*/
private void configureWindowForTransparency() {
BackgroundMode backgroundMode = getBackgroundMode();
if (backgroundMode == BackgroundMode.transparent) {
getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
}
}
/**
* Creates a {@link FrameLayout} with an ID of {@code #FRAGMENT_CONTAINER_ID} that will contain
* the {@link FlutterFragment} displayed by this {@code FlutterFragmentActivity}.
*
* <p>
*
* @return the FrameLayout container
*/
@NonNull
private View createFragmentContainer() {
FrameLayout container = provideRootLayout(this);
container.setId(FRAGMENT_CONTAINER_ID);
container.setLayoutParams(
new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
return container;
}
/**
* Retrieves the previously created {@link FlutterFragment} if possible.
*
* <p>If the activity is recreated, an existing {@link FlutterFragment} may already exist. Retain
* a reference to that {@link FlutterFragment} in the {@code #flutterFragment} field and avoid
* re-creating another {@link FlutterFragment}.
*/
@VisibleForTesting
FlutterFragment retrieveExistingFlutterFragmentIfPossible() {
FragmentManager fragmentManager = getSupportFragmentManager();
return (FlutterFragment) fragmentManager.findFragmentByTag(TAG_FLUTTER_FRAGMENT);
}
/**
* Ensure that a {@link FlutterFragment} is attached to this {@code FlutterFragmentActivity}.
*
* <p>If no {@link FlutterFragment} exists in this {@code FlutterFragmentActivity}, then a {@link
* FlutterFragment} is created and added.
*/
private void ensureFlutterFragmentCreated() {
if (flutterFragment == null) {
// If both activity and fragment have been destroyed, the activity restore may have
// already recreated a new instance of the fragment again via the FragmentActivity.onCreate
// and the FragmentManager.
flutterFragment = retrieveExistingFlutterFragmentIfPossible();
}
if (flutterFragment == null) {
// No FlutterFragment exists yet. This must be the initial Activity creation. We will create
// and add a new FlutterFragment to this Activity.
flutterFragment = createFlutterFragment();
FragmentManager fragmentManager = getSupportFragmentManager();
fragmentManager
.beginTransaction()
.add(FRAGMENT_CONTAINER_ID, flutterFragment, TAG_FLUTTER_FRAGMENT)
.commit();
}
}
/**
* Creates the instance of the {@link FlutterFragment} that this {@code FlutterFragmentActivity}
* displays.
*
* <p>Subclasses may override this method to return a specialization of {@link FlutterFragment}.
*/
@NonNull
protected FlutterFragment createFlutterFragment() {
final BackgroundMode backgroundMode = getBackgroundMode();
final RenderMode renderMode = getRenderMode();
final TransparencyMode transparencyMode =
backgroundMode == BackgroundMode.opaque
? TransparencyMode.opaque
: TransparencyMode.transparent;
final boolean shouldDelayFirstAndroidViewDraw = renderMode == RenderMode.surface;
if (getCachedEngineId() != null) {
Log.v(
TAG,
"Creating FlutterFragment with cached engine:\n"
+ "Cached engine ID: "
+ getCachedEngineId()
+ "\n"
+ "Will destroy engine when Activity is destroyed: "
+ shouldDestroyEngineWithHost()
+ "\n"
+ "Background transparency mode: "
+ backgroundMode
+ "\n"
+ "Will attach FlutterEngine to Activity: "
+ shouldAttachEngineToActivity());
return FlutterFragment.withCachedEngine(getCachedEngineId())
.renderMode(renderMode)
.transparencyMode(transparencyMode)
.handleDeeplinking(shouldHandleDeeplinking())
.shouldAttachEngineToActivity(shouldAttachEngineToActivity())
.destroyEngineWithFragment(shouldDestroyEngineWithHost())
.shouldDelayFirstAndroidViewDraw(shouldDelayFirstAndroidViewDraw)
.build();
} else {
Log.v(
TAG,
"Creating FlutterFragment with new engine:\n"
+ "Cached engine group ID: "
+ getCachedEngineGroupId()
+ "\n"
+ "Background transparency mode: "
+ backgroundMode
+ "\n"
+ "Dart entrypoint: "
+ getDartEntrypointFunctionName()
+ "\n"
+ "Dart entrypoint library uri: "
+ (getDartEntrypointLibraryUri() != null ? getDartEntrypointLibraryUri() : "\"\"")
+ "\n"
+ "Initial route: "
+ getInitialRoute()
+ "\n"
+ "App bundle path: "
+ getAppBundlePath()
+ "\n"
+ "Will attach FlutterEngine to Activity: "
+ shouldAttachEngineToActivity());
if (getCachedEngineGroupId() != null) {
return FlutterFragment.withNewEngineInGroup(getCachedEngineGroupId())
.dartEntrypoint(getDartEntrypointFunctionName())
.initialRoute(getInitialRoute())
.handleDeeplinking(shouldHandleDeeplinking())
.renderMode(renderMode)
.transparencyMode(transparencyMode)
.shouldAttachEngineToActivity(shouldAttachEngineToActivity())
.shouldDelayFirstAndroidViewDraw(shouldDelayFirstAndroidViewDraw)
.build();
}
return FlutterFragment.withNewEngine()
.dartEntrypoint(getDartEntrypointFunctionName())
.dartLibraryUri(getDartEntrypointLibraryUri())
.dartEntrypointArgs(getDartEntrypointArgs())
.initialRoute(getInitialRoute())
.appBundlePath(getAppBundlePath())
.flutterShellArgs(FlutterShellArgs.fromIntent(getIntent()))
.handleDeeplinking(shouldHandleDeeplinking())
.renderMode(renderMode)
.transparencyMode(transparencyMode)
.shouldAttachEngineToActivity(shouldAttachEngineToActivity())
.shouldDelayFirstAndroidViewDraw(shouldDelayFirstAndroidViewDraw)
.build();
}
}
private void configureStatusBarForFullscreenFlutterExperience() {
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
window.setStatusBarColor(0x40000000);
window.getDecorView().setSystemUiVisibility(PlatformPlugin.DEFAULT_SYSTEM_UI);
}
@Override
public void onPostResume() {
super.onPostResume();
flutterFragment.onPostResume();
}
@Override
protected void onNewIntent(@NonNull Intent intent) {
// Forward Intents to our FlutterFragment in case it cares.
flutterFragment.onNewIntent(intent);
super.onNewIntent(intent);
}
@Override
@SuppressWarnings("MissingSuperCall")
public void onBackPressed() {
flutterFragment.onBackPressed();
}
@Override
public void onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
flutterFragment.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
@Override
@SuppressWarnings("MissingSuperCall")
public void onUserLeaveHint() {
flutterFragment.onUserLeaveHint();
}
@Override
public void onTrimMemory(int level) {
super.onTrimMemory(level);
flutterFragment.onTrimMemory(level);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
flutterFragment.onActivityResult(requestCode, resultCode, data);
}
@SuppressWarnings("unused")
@Nullable
protected FlutterEngine getFlutterEngine() {
return flutterFragment.getFlutterEngine();
}
/**
* Returns false if the {@link io.flutter.embedding.engine.FlutterEngine} backing this {@code
* FlutterFragmentActivity} should outlive this {@code FlutterFragmentActivity}, or true to be
* destroyed when the {@code FlutterFragmentActivity} is destroyed.
*
* <p>The default value is {@code true} in cases where {@code FlutterFragmentActivity} created its
* own {@link io.flutter.embedding.engine.FlutterEngine}, and {@code false} in cases where a
* cached {@link io.flutter.embedding.engine.FlutterEngine} was provided.
*/
public boolean shouldDestroyEngineWithHost() {
return getIntent().getBooleanExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, false);
}
/**
* Hook for subclasses to control whether or not the {@link FlutterFragment} within this {@code
* Activity} automatically attaches its {@link io.flutter.embedding.engine.FlutterEngine} to this
* {@code Activity}.
*
* <p>For an explanation of why this control exists, see {@link
* FlutterFragment.NewEngineFragmentBuilder#shouldAttachEngineToActivity()}.
*
* <p>This property is controlled with a protected method instead of an {@code Intent} argument
   * because the only situation in which changing this value would help is one in which {@code
* FlutterFragmentActivity} is being subclassed to utilize a custom and/or cached {@link
* FlutterEngine}.
*
* <p>Defaults to {@code true}.
*/
protected boolean shouldAttachEngineToActivity() {
return true;
}
/**
* Whether to handle the deeplinking from the {@code Intent} automatically if the {@code
* getInitialRoute} returns null.
*
* <p>The default implementation looks {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#HANDLE_DEEPLINKING_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterFragmentActivity}.
*/
@VisibleForTesting
protected boolean shouldHandleDeeplinking() {
try {
Bundle metaData = getMetaData();
boolean shouldHandleDeeplinking =
metaData != null ? metaData.getBoolean(HANDLE_DEEPLINKING_META_DATA_KEY) : false;
return shouldHandleDeeplinking;
} catch (PackageManager.NameNotFoundException e) {
return false;
}
}
/** Hook for subclasses to easily provide a custom {@code FlutterEngine}. */
@Nullable
@Override
public FlutterEngine provideFlutterEngine(@NonNull Context context) {
// No-op. Hook for subclasses.
return null;
}
/**
* Hook for subclasses to easily configure a {@code FlutterEngine}.
*
* <p>This method is called after {@link #provideFlutterEngine(Context)}.
*
* <p>All plugins listed in the app's pubspec are registered in the base implementation of this
* method unless the FlutterEngine for this activity was externally created. To avoid the
* automatic plugin registration for implicitly created FlutterEngines, override this method
* without invoking super(). To keep automatic plugin registration and further configure the
* FlutterEngine, override this method, invoke super(), and then configure the FlutterEngine as
* desired.
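   *
   * <p>For example, a subclass that keeps automatic plugin registration and adds its own setup
   * might look like this sketch (the extra configuration is illustrative):
   *
   * <pre>{@code
   * public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
   *   super.configureFlutterEngine(flutterEngine); // Keeps automatic plugin registration.
   *   // Register additional channels or plugins on flutterEngine here.
   * }
   * }</pre>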
*/
@Override
public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
if (flutterFragment != null && flutterFragment.isFlutterEngineInjected()) {
// If the FlutterEngine was explicitly built and injected into this FlutterActivity, the
// builder should explicitly decide whether to automatically register plugins via the
// FlutterEngine's construction parameter or via the AndroidManifest metadata.
return;
}
GeneratedPluginRegister.registerGeneratedPlugins(flutterEngine);
}
/**
* Hook for the host to cleanup references that were established in {@link
* #configureFlutterEngine(FlutterEngine)} before the host is destroyed or detached.
*
* <p>This method is called in {@link #onDestroy()}.
*/
@Override
public void cleanUpFlutterEngine(@NonNull FlutterEngine flutterEngine) {
// No-op. Hook for subclasses.
}
/**
* A custom path to the bundle that contains this Flutter app's resources, e.g., Dart code
* snapshots.
*
* <p>When this {@code FlutterFragmentActivity} is run by Flutter tooling and a data String is
* included in the launching {@code Intent}, that data String is interpreted as an app bundle
* path.
*
* <p>When otherwise unspecified, the value is null, which defaults to the app bundle path defined
* in {@link io.flutter.embedding.engine.loader.FlutterLoader#findAppBundlePath()}.
*
* <p>Subclasses may override this method to return a custom app bundle path.
*/
@NonNull
protected String getAppBundlePath() {
// If this Activity was launched from tooling, and the incoming Intent contains
// a custom app bundle path, return that path.
// TODO(mattcarroll): determine if we should have an explicit FlutterTestActivity instead of
// conflating.
if (isDebuggable() && Intent.ACTION_RUN.equals(getIntent().getAction())) {
String appBundlePath = getIntent().getDataString();
if (appBundlePath != null) {
return appBundlePath;
}
}
return null;
}
/** Retrieves the meta data specified in the AndroidManifest.xml. */
@Nullable
protected Bundle getMetaData() throws PackageManager.NameNotFoundException {
ActivityInfo activityInfo =
getPackageManager().getActivityInfo(getComponentName(), PackageManager.GET_META_DATA);
return activityInfo.metaData;
}
/**
* The Dart entrypoint that will be executed as soon as the Dart snapshot is loaded.
*
* <p>This preference can be controlled by setting a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#DART_ENTRYPOINT_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterFragmentActivity}.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint.
*/
@NonNull
public String getDartEntrypointFunctionName() {
try {
Bundle metaData = getMetaData();
String desiredDartEntrypoint =
metaData != null ? metaData.getString(DART_ENTRYPOINT_META_DATA_KEY) : null;
return desiredDartEntrypoint != null ? desiredDartEntrypoint : DEFAULT_DART_ENTRYPOINT;
} catch (PackageManager.NameNotFoundException e) {
return DEFAULT_DART_ENTRYPOINT;
}
}
/**
* The Dart entrypoint arguments will be passed as a list of string to Dart's entrypoint function.
*
* <p>A value of null means do not pass any arguments to Dart's entrypoint function.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint arguments.
*/
@Nullable
public List<String> getDartEntrypointArgs() {
return (List<String>) getIntent().getSerializableExtra(EXTRA_DART_ENTRYPOINT_ARGS);
}
/**
* The Dart library URI for the entrypoint that will be executed as soon as the Dart snapshot is
* loaded.
*
* <p>Example value: "package:foo/bar.dart"
*
* <p>This preference can be controlled by setting a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#DART_ENTRYPOINT_URI_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterFragmentActivity}.
*
* <p>A value of null means use the default root library.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint uri.
*/
@Nullable
public String getDartEntrypointLibraryUri() {
try {
Bundle metaData = getMetaData();
String desiredDartLibraryUri =
metaData != null ? metaData.getString(DART_ENTRYPOINT_URI_META_DATA_KEY) : null;
return desiredDartLibraryUri;
} catch (PackageManager.NameNotFoundException e) {
return null;
}
}
/**
* The initial route that a Flutter app will render upon loading and executing its Dart code.
*
* <p>This preference can be controlled with 2 methods:
*
* <ol>
   *   <li>Pass a {@code String} as {@link FlutterActivityLaunchConfigs#EXTRA_INITIAL_ROUTE} with
   *       the
* launching {@code Intent}, or
* <li>Set a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#INITIAL_ROUTE_META_DATA_KEY} for this {@code Activity} in
* the Android manifest.
* </ol>
*
* If both preferences are set, the {@code Intent} preference takes priority.
*
* <p>The reason that a {@code <meta-data>} preference is supported is because this {@code
* Activity} might be the very first {@code Activity} launched, which means the developer won't
* have control over the incoming {@code Intent}.
*
* <p>Subclasses may override this method to directly control the initial route.
*
* <p>If this method returns null and the {@code shouldHandleDeeplinking} returns true, the
* initial route is derived from the {@code Intent} through the Intent.getData() instead.
*/
protected String getInitialRoute() {
if (getIntent().hasExtra(EXTRA_INITIAL_ROUTE)) {
return getIntent().getStringExtra(EXTRA_INITIAL_ROUTE);
}
try {
Bundle metaData = getMetaData();
String desiredInitialRoute =
metaData != null ? metaData.getString(INITIAL_ROUTE_META_DATA_KEY) : null;
return desiredInitialRoute;
} catch (PackageManager.NameNotFoundException e) {
return null;
}
}
/**
* Returns the ID of a statically cached {@link io.flutter.embedding.engine.FlutterEngine} to use
* within this {@code FlutterFragmentActivity}, or {@code null} if this {@code
* FlutterFragmentActivity} does not want to use a cached {@link
* io.flutter.embedding.engine.FlutterEngine}.
*/
@Nullable
protected String getCachedEngineId() {
return getIntent().getStringExtra(EXTRA_CACHED_ENGINE_ID);
}
@Nullable
protected String getCachedEngineGroupId() {
return getIntent().getStringExtra(EXTRA_CACHED_ENGINE_GROUP_ID);
}
/**
* The desired window background mode of this {@code Activity}, which defaults to {@link
* BackgroundMode#opaque}.
*/
@NonNull
protected BackgroundMode getBackgroundMode() {
if (getIntent().hasExtra(EXTRA_BACKGROUND_MODE)) {
return BackgroundMode.valueOf(getIntent().getStringExtra(EXTRA_BACKGROUND_MODE));
} else {
return BackgroundMode.opaque;
}
}
/**
* Returns the desired {@link RenderMode} for the {@link FlutterView} displayed in this {@code
* FlutterFragmentActivity}.
*
* <p>That is, {@link RenderMode#surface} if {@link FlutterFragmentActivity#getBackgroundMode()}
* is {@link BackgroundMode#opaque} or {@link RenderMode#texture} otherwise.
*/
@NonNull
protected RenderMode getRenderMode() {
final BackgroundMode backgroundMode = getBackgroundMode();
return backgroundMode == BackgroundMode.opaque ? RenderMode.surface : RenderMode.texture;
}
/**
* Returns true if Flutter is running in "debug mode", and false otherwise.
*
* <p>Debug mode allows Flutter to operate with hot reload and hot restart. Release mode does not.
*/
private boolean isDebuggable() {
return (getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0;
}
/** Returns a {@link FrameLayout} that is used as the content view of this activity. */
@NonNull
protected FrameLayout provideRootLayout(Context context) {
return new FrameLayout(context);
}
}
| engine/shell/platform/android/io/flutter/embedding/android/FlutterFragmentActivity.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/android/FlutterFragmentActivity.java",
"repo_id": "engine",
"token_count": 13296
} | 335 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import java.util.HashMap;
import java.util.Map;
/**
* Static singleton cache that holds {@link io.flutter.embedding.engine.FlutterEngine} instances
* identified by {@code String}s.
*
* <p>The ID of a given {@link io.flutter.embedding.engine.FlutterEngine} can be whatever {@code
* String} is desired.
*
* <p>{@code FlutterEngineCache} is useful for storing pre-warmed {@link
* io.flutter.embedding.engine.FlutterEngine} instances. {@link
* io.flutter.embedding.android.FlutterActivity} and {@link
* io.flutter.embedding.android.FlutterFragment} use the {@code FlutterEngineCache} singleton
* internally when instructed to use a cached {@link io.flutter.embedding.engine.FlutterEngine}
* based on a given ID. See {@link
* io.flutter.embedding.android.FlutterActivity.CachedEngineIntentBuilder} and {@link
* io.flutter.embedding.android.FlutterFragment#withCachedEngine(String)} for related APIs.
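 *
 * <p>A typical pre-warm flow might look like the following sketch (the engine ID {@code
 * "my_engine_id"} and the surrounding {@code context} are illustrative, not required names):
 *
 * <pre>{@code
 * FlutterEngine engine = new FlutterEngine(context);
 * engine.getDartExecutor().executeDartEntrypoint(DartExecutor.DartEntrypoint.createDefault());
 * FlutterEngineCache.getInstance().put("my_engine_id", engine);
 *
 * // Later, launch UI that reuses the pre-warmed engine.
 * context.startActivity(FlutterActivity.withCachedEngine("my_engine_id").build(context));
 * }</pre>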
*/
public class FlutterEngineCache {
private static FlutterEngineCache instance;
/**
* Returns the static singleton instance of {@code FlutterEngineCache}.
*
* <p>Creates a new instance if one does not yet exist.
*/
@NonNull
public static FlutterEngineCache getInstance() {
if (instance == null) {
instance = new FlutterEngineCache();
}
return instance;
}
private final Map<String, FlutterEngine> cachedEngines = new HashMap<>();
@VisibleForTesting
/* package */ FlutterEngineCache() {}
/**
* Returns {@code true} if a {@link io.flutter.embedding.engine.FlutterEngine} in this cache is
* associated with the given {@code engineId}.
*/
public boolean contains(@NonNull String engineId) {
return cachedEngines.containsKey(engineId);
}
/**
* Returns the {@link io.flutter.embedding.engine.FlutterEngine} in this cache that is associated
   * with the given {@code engineId}, or {@code null} if no such {@link
* io.flutter.embedding.engine.FlutterEngine} exists.
*/
@Nullable
public FlutterEngine get(@NonNull String engineId) {
return cachedEngines.get(engineId);
}
/**
* Places the given {@link io.flutter.embedding.engine.FlutterEngine} in this cache and associates
* it with the given {@code engineId}.
*
* <p>If a {@link io.flutter.embedding.engine.FlutterEngine} already exists in this cache for the
* given {@code engineId}, that {@link io.flutter.embedding.engine.FlutterEngine} is removed from
* this cache.
*/
public void put(@NonNull String engineId, @Nullable FlutterEngine engine) {
if (engine != null) {
cachedEngines.put(engineId, engine);
} else {
cachedEngines.remove(engineId);
}
}
/**
* Removes any {@link io.flutter.embedding.engine.FlutterEngine} that is currently in the cache
* that is identified by the given {@code engineId}.
*/
public void remove(@NonNull String engineId) {
put(engineId, null);
}
/**
* Removes all {@link io.flutter.embedding.engine.FlutterEngine}'s that are currently in the
* cache.
*/
public void clear() {
cachedEngines.clear();
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineCache.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineCache.java",
"repo_id": "engine",
"token_count": 1103
} | 336 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.loader;
import static io.flutter.Build.API_LEVELS;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.os.AsyncTask;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.WorkerThread;
import io.flutter.BuildConfig;
import io.flutter.Log;
import java.io.*;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
/**
 * Extracts Flutter resources from the application's APK assets into the app's data directory so
 * that they are available when initializing the native code.
 */
class ResourceExtractor {
private static final String TAG = "ResourceExtractor";
private static final String TIMESTAMP_PREFIX = "res_timestamp-";
private static final String[] SUPPORTED_ABIS = Build.SUPPORTED_ABIS;
@SuppressWarnings("deprecation")
static long getVersionCode(@NonNull PackageInfo packageInfo) {
// Linter needs P (28) hardcoded or else it will fail these lines.
if (Build.VERSION.SDK_INT >= API_LEVELS.API_28) {
return packageInfo.getLongVersionCode();
} else {
return packageInfo.versionCode;
}
}
private static class ExtractTask extends AsyncTask<Void, Void, Void> {
@NonNull private final String mDataDirPath;
@NonNull private final HashSet<String> mResources;
@NonNull private final AssetManager mAssetManager;
@NonNull private final String mPackageName;
@NonNull private final PackageManager mPackageManager;
ExtractTask(
@NonNull String dataDirPath,
@NonNull HashSet<String> resources,
@NonNull String packageName,
@NonNull PackageManager packageManager,
@NonNull AssetManager assetManager) {
mDataDirPath = dataDirPath;
mResources = resources;
mAssetManager = assetManager;
mPackageName = packageName;
mPackageManager = packageManager;
}
@Override
protected Void doInBackground(Void... unused) {
final File dataDir = new File(mDataDirPath);
final String timestamp = checkTimestamp(dataDir, mPackageManager, mPackageName);
if (timestamp == null) {
return null;
}
deleteFiles(mDataDirPath, mResources);
if (!extractAPK(dataDir)) {
return null;
}
      // `timestamp` is known to be non-null here (checked above); record it so that the next
      // launch can skip extraction for this APK version.
      try {
        new File(dataDir, timestamp).createNewFile();
      } catch (IOException e) {
        Log.w(TAG, "Failed to write resource timestamp");
      }
return null;
}
/// Returns true if successfully unpacked APK resources,
/// otherwise deletes all resources and returns false.
@WorkerThread
private boolean extractAPK(@NonNull File dataDir) {
for (String asset : mResources) {
try {
final String resource = "assets/" + asset;
final File output = new File(dataDir, asset);
if (output.exists()) {
continue;
}
if (output.getParentFile() != null) {
output.getParentFile().mkdirs();
}
try (InputStream is = mAssetManager.open(asset);
OutputStream os = new FileOutputStream(output)) {
copy(is, os);
}
if (BuildConfig.DEBUG) {
Log.i(TAG, "Extracted baseline resource " + resource);
}
} catch (FileNotFoundException fnfe) {
continue;
} catch (IOException ioe) {
Log.w(TAG, "Exception unpacking resources: " + ioe.getMessage());
deleteFiles(mDataDirPath, mResources);
return false;
}
}
return true;
}
}
@NonNull private final String mDataDirPath;
@NonNull private final String mPackageName;
@NonNull private final PackageManager mPackageManager;
@NonNull private final AssetManager mAssetManager;
@NonNull private final HashSet<String> mResources;
private ExtractTask mExtractTask;
ResourceExtractor(
@NonNull String dataDirPath,
@NonNull String packageName,
@NonNull PackageManager packageManager,
@NonNull AssetManager assetManager) {
mDataDirPath = dataDirPath;
mPackageName = packageName;
mPackageManager = packageManager;
mAssetManager = assetManager;
mResources = new HashSet<>();
}
ResourceExtractor addResource(@NonNull String resource) {
mResources.add(resource);
return this;
}
ResourceExtractor addResources(@NonNull Collection<String> resources) {
mResources.addAll(resources);
return this;
}
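  // Illustrative builder-style usage (a sketch; the constructor arguments and the asset name
  // below are placeholders supplied by the embedding, not values defined in this class):
  //
  //   new ResourceExtractor(dataDirPath, packageName, packageManager, assetManager)
  //       .addResource("flutter_assets/example_asset")
  //       .start()
  //       .waitForCompletion();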
ResourceExtractor start() {
if (BuildConfig.DEBUG && mExtractTask != null) {
Log.e(
TAG, "Attempted to start resource extraction while another extraction was in progress.");
}
mExtractTask =
new ExtractTask(mDataDirPath, mResources, mPackageName, mPackageManager, mAssetManager);
mExtractTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
return this;
}
void waitForCompletion() {
if (mExtractTask == null) {
return;
}
try {
mExtractTask.get();
} catch (CancellationException | ExecutionException | InterruptedException e) {
deleteFiles(mDataDirPath, mResources);
}
}
private static String[] getExistingTimestamps(File dataDir) {
return dataDir.list(
new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith(TIMESTAMP_PREFIX);
}
});
}
private static void deleteFiles(@NonNull String dataDirPath, @NonNull HashSet<String> resources) {
final File dataDir = new File(dataDirPath);
for (String resource : resources) {
final File file = new File(dataDir, resource);
if (file.exists()) {
file.delete();
}
}
final String[] existingTimestamps = getExistingTimestamps(dataDir);
if (existingTimestamps == null) {
return;
}
for (String timestamp : existingTimestamps) {
new File(dataDir, timestamp).delete();
}
}
// Returns null if extracted resources are found and match the current APK version
// and update version if any, otherwise returns the current APK and update version.
private static String checkTimestamp(
@NonNull File dataDir, @NonNull PackageManager packageManager, @NonNull String packageName) {
PackageInfo packageInfo = null;
try {
packageInfo = packageManager.getPackageInfo(packageName, 0);
} catch (PackageManager.NameNotFoundException e) {
return TIMESTAMP_PREFIX;
}
if (packageInfo == null) {
return TIMESTAMP_PREFIX;
}
String expectedTimestamp =
TIMESTAMP_PREFIX + getVersionCode(packageInfo) + "-" + packageInfo.lastUpdateTime;
final String[] existingTimestamps = getExistingTimestamps(dataDir);
if (existingTimestamps == null) {
if (BuildConfig.DEBUG) {
Log.i(TAG, "No extracted resources found");
}
return expectedTimestamp;
}
if (existingTimestamps.length == 1) {
if (BuildConfig.DEBUG) {
Log.i(TAG, "Found extracted resources " + existingTimestamps[0]);
}
}
if (existingTimestamps.length != 1 || !expectedTimestamp.equals(existingTimestamps[0])) {
if (BuildConfig.DEBUG) {
Log.i(TAG, "Resource version mismatch " + expectedTimestamp);
}
return expectedTimestamp;
}
return null;
}
private static void copy(@NonNull InputStream in, @NonNull OutputStream out) throws IOException {
byte[] buf = new byte[16 * 1024];
for (int i; (i = in.read(buf)) >= 0; ) {
out.write(buf, 0, i);
}
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/loader/ResourceExtractor.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/loader/ResourceExtractor.java",
"repo_id": "engine",
"token_count": 2864
} | 337 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.plugins.service;
import android.app.Service;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.lifecycle.Lifecycle;
/**
* Control surface through which a {@link Service} attaches to a {@link
* io.flutter.embedding.engine.FlutterEngine}.
*
* <p>A {@link Service} that contains a {@link io.flutter.embedding.engine.FlutterEngine} should
* coordinate itself with the {@link io.flutter.embedding.engine.FlutterEngine}'s {@code
* ServiceControlSurface}.
*/
public interface ServiceControlSurface {
/**
* Call this method from the {@link Service} that is running the {@link
* io.flutter.embedding.engine.FlutterEngine} that is associated with this {@code
* ServiceControlSurface}.
*
* <p>Once a {@link Service} is created, and its associated {@link
* io.flutter.embedding.engine.FlutterEngine} is executing Dart code, the {@link Service} should
* invoke this method. At that point the {@link io.flutter.embedding.engine.FlutterEngine} is
* considered "attached" to the {@link Service} and all {@link ServiceAware} plugins are given
* access to the {@link Service}.
*
* <p>{@code isForeground} should be true if the given {@link Service} is running in the
* foreground, false otherwise.
*/
void attachToService(
@NonNull Service service, @Nullable Lifecycle lifecycle, boolean isForeground);
/**
* Call this method from the {@link Service} that is attached to this {@code
   * ServiceControlSurface}'s {@link io.flutter.embedding.engine.FlutterEngine} when the {@link
* Service} is about to be destroyed.
*
* <p>This method gives each {@link ServiceAware} plugin an opportunity to clean up its references
   * before the {@link Service} is destroyed.
*/
void detachFromService();
/**
* Call this method from the {@link Service} that is attached to this {@code
* ServiceControlSurface}'s {@link io.flutter.embedding.engine.FlutterEngine} when the {@link
* Service} goes from background to foreground.
*/
void onMoveToForeground();
/**
* Call this method from the {@link Service} that is attached to this {@code
* ServiceControlSurface}'s {@link io.flutter.embedding.engine.FlutterEngine} when the {@link
* Service} goes from foreground to background.
*/
void onMoveToBackground();
}
| engine/shell/platform/android/io/flutter/embedding/engine/plugins/service/ServiceControlSurface.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/plugins/service/ServiceControlSurface.java",
"repo_id": "engine",
"token_count": 772
} | 338 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.systemchannels;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.StandardMethodCodec;
import java.util.HashMap;
/** System channel that receives requests for mouse cursor behavior, e.g., set as system cursors. */
public class MouseCursorChannel {
private static final String TAG = "MouseCursorChannel";
@NonNull public final MethodChannel channel;
@Nullable private MouseCursorMethodHandler mouseCursorMethodHandler;
public MouseCursorChannel(@NonNull DartExecutor dartExecutor) {
channel = new MethodChannel(dartExecutor, "flutter/mousecursor", StandardMethodCodec.INSTANCE);
channel.setMethodCallHandler(parsingMethodCallHandler);
}
/**
* Sets the {@link MouseCursorMethodHandler} which receives all events and requests that are
* parsed from the underlying platform channel.
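   *
   * <p>A minimal sketch of registering a handler (the handler body is illustrative only):
   *
   * <pre>{@code
   * mouseCursorChannel.setMethodHandler(
   *     kind -> {
   *       // Map the cursor kind (e.g. "basic", "click", "text") to a platform pointer icon here.
   *     });
   * }</pre>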
*/
public void setMethodHandler(@Nullable MouseCursorMethodHandler mouseCursorMethodHandler) {
this.mouseCursorMethodHandler = mouseCursorMethodHandler;
}
@NonNull
private final MethodChannel.MethodCallHandler parsingMethodCallHandler =
new MethodChannel.MethodCallHandler() {
@Override
public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) {
if (mouseCursorMethodHandler == null) {
// If no explicit mouseCursorMethodHandler has been registered then we don't
// need to forward this call to an API. Return.
return;
}
final String method = call.method;
Log.v(TAG, "Received '" + method + "' message.");
try {
// More methods are expected to be added here, hence the switch.
switch (method) {
case "activateSystemCursor":
@SuppressWarnings("unchecked")
final HashMap<String, Object> data = (HashMap<String, Object>) call.arguments;
final String kind = (String) data.get("kind");
try {
mouseCursorMethodHandler.activateSystemCursor(kind);
} catch (Exception e) {
result.error("error", "Error when setting cursors: " + e.getMessage(), null);
break;
}
result.success(true);
break;
default:
}
} catch (Exception e) {
result.error("error", "Unhandled error: " + e.getMessage(), null);
}
}
};
@VisibleForTesting
public void synthesizeMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) {
parsingMethodCallHandler.onMethodCall(call, result);
}
public interface MouseCursorMethodHandler {
// Called when the pointer should start displaying a system mouse cursor
    // specified by {@code kind}.
public void activateSystemCursor(@NonNull String kind);
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/MouseCursorChannel.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/MouseCursorChannel.java",
"repo_id": "engine",
"token_count": 1204
} | 339 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.common;
import io.flutter.BuildConfig;
import io.flutter.Log;
/** Thrown to indicate that a Flutter method invocation failed on the Flutter side. */
public class FlutterException extends RuntimeException {
private static final String TAG = "FlutterException#";
public final String code;
public final Object details;
FlutterException(String code, String message, Object details) {
super(message);
if (BuildConfig.DEBUG && code == null) {
Log.e(TAG, "Parameter code must not be null.");
}
this.code = code;
this.details = details;
}
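
  // Illustrative note: embedder code does not normally construct this exception directly. A method
  // codec (for example StandardMethodCodec#decodeEnvelope) creates it when the Dart side replies to
  // a method invocation with an error envelope, and MethodChannel then converts it into the
  // code/message/details arguments passed to MethodChannel.Result#error.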
}
| engine/shell/platform/android/io/flutter/plugin/common/FlutterException.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/common/FlutterException.java",
"repo_id": "engine",
"token_count": 223
} | 340 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.editing;
import android.view.textservice.SentenceSuggestionsInfo;
import android.view.textservice.SpellCheckerSession;
import android.view.textservice.SuggestionsInfo;
import android.view.textservice.TextInfo;
import android.view.textservice.TextServicesManager;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import io.flutter.embedding.engine.systemchannels.SpellCheckChannel;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.localization.LocalizationPlugin;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
/**
* {@link SpellCheckPlugin} is the implementation of all functionality needed for spell check for
* text input.
*
 * <p>The plugin handles spell check requests sent by the {@link
 * io.flutter.embedding.engine.systemchannels.SpellCheckChannel} by forwarding them to the Android
 * spell check service. It also receives the spell check results from the service and sends them
 * back to the framework through the {@link
 * io.flutter.embedding.engine.systemchannels.SpellCheckChannel}.
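 *
 * <p>A construction sketch (how the {@code Context} and the {@code SpellCheckChannel} are obtained
 * is up to the embedder; the names below are illustrative):
 *
 * <pre>{@code
 * TextServicesManager tsm =
 *     (TextServicesManager) context.getSystemService(Context.TEXT_SERVICES_MANAGER_SERVICE);
 * SpellCheckPlugin plugin = new SpellCheckPlugin(tsm, spellCheckChannel);
 * // ...
 * plugin.destroy(); // when spell check is no longer needed
 * }</pre>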
*/
public class SpellCheckPlugin
implements SpellCheckChannel.SpellCheckMethodHandler,
SpellCheckerSession.SpellCheckerSessionListener {
private final SpellCheckChannel mSpellCheckChannel;
private final TextServicesManager mTextServicesManager;
private SpellCheckerSession mSpellCheckerSession;
public static final String START_INDEX_KEY = "startIndex";
public static final String END_INDEX_KEY = "endIndex";
public static final String SUGGESTIONS_KEY = "suggestions";
@VisibleForTesting MethodChannel.Result pendingResult;
// The maximum number of suggestions that the Android spell check service is allowed to provide
// per word. Same number that is used by default for Android's TextViews.
private static final int MAX_SPELL_CHECK_SUGGESTIONS = 5;
public SpellCheckPlugin(
@NonNull TextServicesManager textServicesManager,
@NonNull SpellCheckChannel spellCheckChannel) {
mTextServicesManager = textServicesManager;
mSpellCheckChannel = spellCheckChannel;
mSpellCheckChannel.setSpellCheckMethodHandler(this);
}
/**
* Unregisters this {@code SpellCheckPlugin} as the {@code
* SpellCheckChannel.SpellCheckMethodHandler}, for the {@link
* io.flutter.embedding.engine.systemchannels.SpellCheckChannel}, and closes the most recently
* opened {@code SpellCheckerSession}.
*
* <p>Do not invoke any methods on a {@code SpellCheckPlugin} after invoking this method.
*/
public void destroy() {
mSpellCheckChannel.setSpellCheckMethodHandler(null);
if (mSpellCheckerSession != null) {
mSpellCheckerSession.close();
}
}
/**
* Initiates call to native spell checker to spell check specified text if there is no result
* awaiting a response.
*/
@Override
public void initiateSpellCheck(
@NonNull String locale, @NonNull String text, @NonNull MethodChannel.Result result) {
if (pendingResult != null) {
result.error("error", "Previous spell check request still pending.", null);
return;
}
pendingResult = result;
performSpellCheck(locale, text);
}
/** Calls on the Android spell check API to spell check specified text. */
public void performSpellCheck(@NonNull String locale, @NonNull String text) {
Locale localeFromString = LocalizationPlugin.localeFromString(locale);
if (mSpellCheckerSession == null) {
mSpellCheckerSession =
mTextServicesManager.newSpellCheckerSession(
null,
localeFromString,
this,
              /* referToSpellCheckerLanguageSettings= */ true);
}
TextInfo[] textInfos = new TextInfo[] {new TextInfo(text)};
mSpellCheckerSession.getSentenceSuggestions(textInfos, MAX_SPELL_CHECK_SUGGESTIONS);
}
/**
* Callback for Android spell check API that decomposes results and send results through the
* {@link SpellCheckChannel}.
*
* <p>Spell check results are encoded as dictionaries with a format that looks like
*
* <pre>{@code
* {
* startIndex: 0,
* endIndex: 5,
* suggestions: [hello, ...]
* }
* }</pre>
*
* where there may be up to 5 suggestions.
*/
@Override
public void onGetSentenceSuggestions(SentenceSuggestionsInfo[] results) {
if (results.length == 0) {
pendingResult.success(new ArrayList<HashMap<String, Object>>());
pendingResult = null;
return;
}
ArrayList<HashMap<String, Object>> spellCheckerSuggestionSpans =
new ArrayList<HashMap<String, Object>>();
SentenceSuggestionsInfo spellCheckResults = results[0];
if (spellCheckResults == null) {
pendingResult.success(new ArrayList<HashMap<String, Object>>());
pendingResult = null;
return;
}
for (int i = 0; i < spellCheckResults.getSuggestionsCount(); i++) {
SuggestionsInfo suggestionsInfo = spellCheckResults.getSuggestionsInfoAt(i);
int suggestionsCount = suggestionsInfo.getSuggestionsCount();
if (suggestionsCount <= 0) {
continue;
}
HashMap<String, Object> spellCheckerSuggestionSpan = new HashMap<String, Object>();
int start = spellCheckResults.getOffsetAt(i);
int end = start + spellCheckResults.getLengthAt(i);
spellCheckerSuggestionSpan.put(START_INDEX_KEY, start);
spellCheckerSuggestionSpan.put(END_INDEX_KEY, end);
ArrayList<String> suggestions = new ArrayList<String>();
boolean validSuggestionsFound = false;
for (int j = 0; j < suggestionsCount; j++) {
String suggestion = suggestionsInfo.getSuggestionAt(j);
// TODO(camsim99): Support spell check on Samsung by retrieving accurate spell check
// results, then remove this check: https://github.com/flutter/flutter/issues/120608.
if (!suggestion.equals("")) {
validSuggestionsFound = true;
suggestions.add(suggestion);
}
}
if (!validSuggestionsFound) {
continue;
}
spellCheckerSuggestionSpan.put(SUGGESTIONS_KEY, suggestions);
spellCheckerSuggestionSpans.add(spellCheckerSuggestionSpan);
}
pendingResult.success(spellCheckerSuggestionSpans);
pendingResult = null;
}
@Override
public void onGetSuggestions(SuggestionsInfo[] results) {
// Deprecated callback for Android spell check API; will not use.
}
}
| engine/shell/platform/android/io/flutter/plugin/editing/SpellCheckPlugin.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/editing/SpellCheckPlugin.java",
"repo_id": "engine",
"token_count": 2154
} | 341 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.platform;
import static android.view.MotionEvent.PointerCoords;
import static android.view.MotionEvent.PointerProperties;
import static io.flutter.Build.API_LEVELS;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.MutableContextWrapper;
import android.os.Build;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.MotionEvent.PointerCoords;
import android.view.MotionEvent.PointerProperties;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.android.AndroidTouchProcessor;
import io.flutter.embedding.android.FlutterView;
import io.flutter.embedding.android.MotionEventTracker;
import io.flutter.embedding.engine.FlutterOverlaySurface;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.mutatorsstack.*;
import io.flutter.embedding.engine.renderer.FlutterRenderer;
import io.flutter.embedding.engine.systemchannels.PlatformViewsChannel;
import io.flutter.plugin.editing.TextInputPlugin;
import io.flutter.util.ViewUtils;
import io.flutter.view.AccessibilityBridge;
import io.flutter.view.TextureRegistry;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
/**
* Manages platform views.
*
* <p>Each {@link io.flutter.embedding.engine.FlutterEngine} or {@link
* io.flutter.app.FlutterPluginRegistry} has a single platform views controller. A platform views
* controller can be attached to at most one Flutter view.
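 *
 * <p>A rough lifecycle sketch (the engine normally performs these steps internally; the local
 * variable names are illustrative):
 *
 * <pre>{@code
 * PlatformViewsController controller = new PlatformViewsController();
 * controller.attach(context, textureRegistry, dartExecutor);
 * controller.attachToView(flutterView);
 * // ... frames are rendered and platform views are created via the platform channel ...
 * controller.detachFromView();
 * controller.detach();
 * }</pre>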
*/
public class PlatformViewsController implements PlatformViewsAccessibilityDelegate {
private static final String TAG = "PlatformViewsController";
// These view types allow out-of-band drawing commands that don't notify the Android view
// hierarchy.
// To support these cases, Flutter hosts the embedded view in a VirtualDisplay,
// and binds the VirtualDisplay to a GL texture that is then composed by the engine.
// However, there are a few issues with Virtual Displays. For example, they don't fully support
// accessibility due to https://github.com/flutter/flutter/issues/29717,
// and keyboard interactions may have non-deterministic behavior.
// Views that issue out-of-band drawing commands that aren't included in this array are
// required to call `View#invalidate()` to notify Flutter about the update.
// This isn't ideal, but given all the other limitations it's a reasonable tradeoff.
// Related issue: https://github.com/flutter/flutter/issues/103630
private static Class[] VIEW_TYPES_REQUIRE_VIRTUAL_DISPLAY = {SurfaceView.class};
private final PlatformViewRegistryImpl registry;
private AndroidTouchProcessor androidTouchProcessor;
// The context of the Activity or Fragment hosting the render target for the Flutter engine.
private Context context;
// The View currently rendering the Flutter UI associated with these platform views.
private FlutterView flutterView;
// The texture registry maintaining the textures into which the embedded views will be rendered.
@Nullable private TextureRegistry textureRegistry;
@Nullable private TextInputPlugin textInputPlugin;
// The system channel used to communicate with the framework about platform views.
private PlatformViewsChannel platformViewsChannel;
// The accessibility bridge to which accessibility events form the platform views will be
// dispatched.
private final AccessibilityEventsDelegate accessibilityEventsDelegate;
// TODO(mattcarroll): Refactor overall platform views to facilitate testing and then make
// this private. This is visible as a hack to facilitate testing. This was deemed the least
// bad option at the time of writing.
@VisibleForTesting /* package */ final HashMap<Integer, VirtualDisplayController> vdControllers;
// Maps a virtual display's context to the embedded view hosted in this virtual display.
  // Since each virtual display has its own unique context, this allows associating any view with
  // the platform view that it is associated with (e.g., if a platform view creates other views in
  // the same virtual display).
@VisibleForTesting /* package */ final HashMap<Context, View> contextToEmbeddedView;
// The platform views.
private final SparseArray<PlatformView> platformViews;
// The platform view wrappers that are appended to FlutterView.
//
// These platform views use a PlatformViewLayer in the framework. This is different than
// the platform views that use a TextureLayer.
//
  // This distinction is necessary because a PlatformViewLayer allows embedding Android's
  // SurfaceViews in a Flutter app, whereas the texture layer is unable to support such native
  // views.
//
// If an entry in `platformViews` doesn't have an entry in this array, the platform view isn't
// in the view hierarchy.
//
// This view provides a wrapper that applies scene builder operations to the platform view.
// For example, a transform matrix, or setting opacity on the platform view layer.
private final SparseArray<FlutterMutatorView> platformViewParent;
// Map of unique IDs to views that render overlay layers.
private final SparseArray<PlatformOverlayView> overlayLayerViews;
// The platform view wrappers that are appended to FlutterView.
//
// These platform views use a TextureLayer in the framework. This is different than
// the platform views that use a PlatformViewLayer.
//
// This is the default mode, and recommended for better performance.
private final SparseArray<PlatformViewWrapper> viewWrappers;
// Next available unique ID for use in overlayLayerViews.
private int nextOverlayLayerId = 0;
// Tracks whether the flutterView has been converted to use a FlutterImageView.
private boolean flutterViewConvertedToImageView = false;
// When adding platform views using Hybrid Composition, the engine converts the render surface
// to a FlutterImageView to help improve animation synchronization on Android. This flag allows
// disabling this conversion through the PlatformView platform channel.
private boolean synchronizeToNativeViewHierarchy = true;
// Overlay layer IDs that were displayed since the start of the current frame.
private final HashSet<Integer> currentFrameUsedOverlayLayerIds;
// Platform view IDs that were displayed since the start of the current frame.
private final HashSet<Integer> currentFrameUsedPlatformViewIds;
// Used to acquire the original motion events using the motionEventIds.
private final MotionEventTracker motionEventTracker;
// Whether software rendering is used.
private boolean usesSoftwareRendering = false;
private static boolean enableImageRenderTarget = true;
private static boolean enableSurfaceProducerRenderTarget = true;
private final PlatformViewsChannel.PlatformViewsHandler channelHandler =
new PlatformViewsChannel.PlatformViewsHandler() {
@Override
// TODO(egarciad): Remove the need for this.
// https://github.com/flutter/flutter/issues/96679
public void createForPlatformViewLayer(
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
// API level 19 is required for `android.graphics.ImageReader`.
enforceMinimumAndroidApiVersion(19);
ensureValidRequest(request);
final PlatformView platformView = createPlatformView(request, false);
configureForHybridComposition(platformView, request);
// New code should be added to configureForHybridComposition, not here, unless it is
// not applicable to fallback from TLHC to HC.
}
@Override
public long createForTextureLayer(
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
ensureValidRequest(request);
final int viewId = request.viewId;
if (viewWrappers.get(viewId) != null) {
throw new IllegalStateException(
"Trying to create an already created platform view, view id: " + viewId);
}
if (textureRegistry == null) {
throw new IllegalStateException(
"Texture registry is null. This means that platform views controller was detached,"
+ " view id: "
+ viewId);
}
if (flutterView == null) {
throw new IllegalStateException(
"Flutter view is null. This means the platform views controller doesn't have an"
+ " attached view, view id: "
+ viewId);
}
final PlatformView platformView = createPlatformView(request, true);
final View embeddedView = platformView.getView();
if (embeddedView.getParent() != null) {
throw new IllegalStateException(
"The Android view returned from PlatformView#getView() was already added to a"
+ " parent view.");
}
          // The newer Texture Layer Hybrid Composition mode isn't supported if any of the
// following are true:
// - The embedded view contains any of the VIEW_TYPES_REQUIRE_VIRTUAL_DISPLAY view types.
// These views allow out-of-band graphics operations that aren't notified to the Android
// view hierarchy via callbacks such as ViewParent#onDescendantInvalidated().
// - The API level is <23, due to TLHC implementation API requirements.
final boolean supportsTextureLayerMode =
Build.VERSION.SDK_INT >= API_LEVELS.API_23
&& !ViewUtils.hasChildViewOfType(
embeddedView, VIEW_TYPES_REQUIRE_VIRTUAL_DISPLAY);
// Fall back to Hybrid Composition or Virtual Display when necessary, depending on which
// fallback mode is requested.
if (!supportsTextureLayerMode) {
if (request.displayMode
== PlatformViewsChannel.PlatformViewCreationRequest.RequestedDisplayMode
.TEXTURE_WITH_HYBRID_FALLBACK) {
configureForHybridComposition(platformView, request);
return PlatformViewsChannel.PlatformViewsHandler.NON_TEXTURE_FALLBACK;
} else if (!usesSoftwareRendering) { // Virtual Display doesn't support software mode.
return configureForVirtualDisplay(platformView, request);
}
// TODO(stuartmorgan): Consider throwing a specific exception here as a breaking change.
// For now, preserve the 3.0 behavior of falling through to Texture Layer mode even
// though it won't work correctly.
}
return configureForTextureLayerComposition(platformView, request);
}
@Override
public void dispose(int viewId) {
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
Log.e(TAG, "Disposing unknown platform view with id: " + viewId);
return;
}
if (platformView.getView() != null) {
final View embeddedView = platformView.getView();
final ViewGroup pvParent = (ViewGroup) embeddedView.getParent();
if (pvParent != null) {
// Eagerly remove the embedded view from the PlatformViewWrapper.
// Without this call, we see some crashes because removing the view
// is used as a signal to stop processing.
pvParent.removeView(embeddedView);
}
}
platformViews.remove(viewId);
try {
platformView.dispose();
} catch (RuntimeException exception) {
Log.e(TAG, "Disposing platform view threw an exception", exception);
}
if (usesVirtualDisplay(viewId)) {
final VirtualDisplayController vdController = vdControllers.get(viewId);
final View embeddedView = vdController.getView();
if (embeddedView != null) {
contextToEmbeddedView.remove(embeddedView.getContext());
}
vdController.dispose();
vdControllers.remove(viewId);
return;
}
// The platform view is displayed using a TextureLayer and is inserted in the view
// hierarchy.
final PlatformViewWrapper viewWrapper = viewWrappers.get(viewId);
if (viewWrapper != null) {
viewWrapper.removeAllViews();
viewWrapper.release();
viewWrapper.unsetOnDescendantFocusChangeListener();
final ViewGroup wrapperParent = (ViewGroup) viewWrapper.getParent();
if (wrapperParent != null) {
wrapperParent.removeView(viewWrapper);
}
viewWrappers.remove(viewId);
return;
}
// The platform view is displayed using a PlatformViewLayer.
final FlutterMutatorView parentView = platformViewParent.get(viewId);
if (parentView != null) {
parentView.removeAllViews();
parentView.unsetOnDescendantFocusChangeListener();
final ViewGroup mutatorViewParent = (ViewGroup) parentView.getParent();
if (mutatorViewParent != null) {
mutatorViewParent.removeView(parentView);
}
platformViewParent.remove(viewId);
}
}
@Override
public void offset(int viewId, double top, double left) {
if (usesVirtualDisplay(viewId)) {
// Virtual displays don't need an accessibility offset.
return;
}
// For platform views that use TextureView and are in the view hierarchy, set
// an offset to the wrapper view.
// This ensures that the accessibility highlights are drawn in the expected position on
// screen.
          // This offset doesn't affect the position of the embedded view by itself since the GL
// texture is positioned by the Flutter engine, which knows where to position different
// types of layers.
final PlatformViewWrapper viewWrapper = viewWrappers.get(viewId);
if (viewWrapper == null) {
Log.e(TAG, "Setting offset for unknown platform view with id: " + viewId);
return;
}
final int physicalTop = toPhysicalPixels(top);
final int physicalLeft = toPhysicalPixels(left);
final FrameLayout.LayoutParams layoutParams =
(FrameLayout.LayoutParams) viewWrapper.getLayoutParams();
layoutParams.topMargin = physicalTop;
layoutParams.leftMargin = physicalLeft;
viewWrapper.setLayoutParams(layoutParams);
}
@Override
public void resize(
@NonNull PlatformViewsChannel.PlatformViewResizeRequest request,
@NonNull PlatformViewsChannel.PlatformViewBufferResized onComplete) {
final int physicalWidth = toPhysicalPixels(request.newLogicalWidth);
final int physicalHeight = toPhysicalPixels(request.newLogicalHeight);
final int viewId = request.viewId;
if (usesVirtualDisplay(viewId)) {
final float originalDisplayDensity = getDisplayDensity();
final VirtualDisplayController vdController = vdControllers.get(viewId);
            // Resizing involves moving the platform view to a new virtual display. Doing so
// potentially results in losing an active input connection. To make sure we preserve
// the input connection when resizing we lock it here and unlock after the resize is
// complete.
lockInputConnection(vdController);
vdController.resize(
physicalWidth,
physicalHeight,
() -> {
unlockInputConnection(vdController);
// Converting back to logic pixels requires a context, which may no longer be
// available. If that happens, assume the same logic/physical relationship as
// was present when the request arrived.
final float displayDensity =
context == null ? originalDisplayDensity : getDisplayDensity();
onComplete.run(
new PlatformViewsChannel.PlatformViewBufferSize(
toLogicalPixels(vdController.getRenderTargetWidth(), displayDensity),
toLogicalPixels(vdController.getRenderTargetHeight(), displayDensity)));
});
return;
}
final PlatformView platformView = platformViews.get(viewId);
final PlatformViewWrapper viewWrapper = viewWrappers.get(viewId);
if (platformView == null || viewWrapper == null) {
Log.e(TAG, "Resizing unknown platform view with id: " + viewId);
return;
}
// Resize the buffer only when the current buffer size is smaller than the new size.
// This is required to prevent a situation when smooth keyboard animation
// resizes the texture too often, such that the GPU and the platform thread don't agree on
// the timing of the new size.
// Resizing the texture causes pixel stretching since the size of the GL texture used in
// the engine is set by the framework, but the texture buffer size is set by the
// platform down below.
if (physicalWidth > viewWrapper.getRenderTargetWidth()
|| physicalHeight > viewWrapper.getRenderTargetHeight()) {
viewWrapper.resizeRenderTarget(physicalWidth, physicalHeight);
}
final ViewGroup.LayoutParams viewWrapperLayoutParams = viewWrapper.getLayoutParams();
viewWrapperLayoutParams.width = physicalWidth;
viewWrapperLayoutParams.height = physicalHeight;
viewWrapper.setLayoutParams(viewWrapperLayoutParams);
final View embeddedView = platformView.getView();
if (embeddedView != null) {
final ViewGroup.LayoutParams embeddedViewLayoutParams = embeddedView.getLayoutParams();
embeddedViewLayoutParams.width = physicalWidth;
embeddedViewLayoutParams.height = physicalHeight;
embeddedView.setLayoutParams(embeddedViewLayoutParams);
}
onComplete.run(
new PlatformViewsChannel.PlatformViewBufferSize(
toLogicalPixels(viewWrapper.getRenderTargetWidth()),
toLogicalPixels(viewWrapper.getRenderTargetHeight())));
}
@Override
public void onTouch(@NonNull PlatformViewsChannel.PlatformViewTouch touch) {
final int viewId = touch.viewId;
final float density = context.getResources().getDisplayMetrics().density;
if (usesVirtualDisplay(viewId)) {
final VirtualDisplayController vdController = vdControllers.get(viewId);
final MotionEvent event = toMotionEvent(density, touch, true);
vdController.dispatchTouchEvent(event);
return;
}
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
Log.e(TAG, "Sending touch to an unknown view with id: " + viewId);
return;
}
final View view = platformView.getView();
if (view == null) {
Log.e(TAG, "Sending touch to a null view with id: " + viewId);
return;
}
final MotionEvent event = toMotionEvent(density, touch, false);
view.dispatchTouchEvent(event);
}
@Override
public void setDirection(int viewId, int direction) {
if (!validateDirection(direction)) {
throw new IllegalStateException(
"Trying to set unknown direction value: "
+ direction
+ "(view id: "
+ viewId
+ ")");
}
View embeddedView;
if (usesVirtualDisplay(viewId)) {
final VirtualDisplayController controller = vdControllers.get(viewId);
embeddedView = controller.getView();
} else {
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
Log.e(TAG, "Setting direction to an unknown view with id: " + viewId);
return;
}
embeddedView = platformView.getView();
}
if (embeddedView == null) {
Log.e(TAG, "Setting direction to a null view with id: " + viewId);
return;
}
embeddedView.setLayoutDirection(direction);
}
@Override
public void clearFocus(int viewId) {
View embeddedView;
if (usesVirtualDisplay(viewId)) {
final VirtualDisplayController controller = vdControllers.get(viewId);
embeddedView = controller.getView();
} else {
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
Log.e(TAG, "Clearing focus on an unknown view with id: " + viewId);
return;
}
embeddedView = platformView.getView();
}
if (embeddedView == null) {
Log.e(TAG, "Clearing focus on a null view with id: " + viewId);
return;
}
embeddedView.clearFocus();
}
@Override
public void synchronizeToNativeViewHierarchy(boolean yes) {
synchronizeToNativeViewHierarchy = yes;
}
};
/// Throws an exception if the SDK version is below minSdkVersion.
private void enforceMinimumAndroidApiVersion(int minSdkVersion) {
if (Build.VERSION.SDK_INT < minSdkVersion) {
throw new IllegalStateException(
"Trying to use platform views with API "
+ Build.VERSION.SDK_INT
+ ", required API level is: "
+ minSdkVersion);
}
}
private void ensureValidRequest(
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
if (!validateDirection(request.direction)) {
throw new IllegalStateException(
"Trying to create a view with unknown direction value: "
+ request.direction
+ "(view id: "
+ request.viewId
+ ")");
}
}
// Creates a platform view based on `request`, performs configuration that's common to
// all display modes, and adds it to `platformViews`.
@VisibleForTesting(otherwise = VisibleForTesting.PACKAGE_PRIVATE)
public PlatformView createPlatformView(
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request, boolean wrapContext) {
final PlatformViewFactory viewFactory = registry.getFactory(request.viewType);
if (viewFactory == null) {
throw new IllegalStateException(
"Trying to create a platform view of unregistered type: " + request.viewType);
}
Object createParams = null;
if (request.params != null) {
createParams = viewFactory.getCreateArgsCodec().decodeMessage(request.params);
}
// In some display modes, the context needs to be modified during display.
// TODO(stuartmorgan): Make this wrapping unconditional if possible; for context see
// https://github.com/flutter/flutter/issues/113449
final Context mutableContext = wrapContext ? new MutableContextWrapper(context) : context;
final PlatformView platformView =
viewFactory.create(mutableContext, request.viewId, createParams);
// Configure the view to match the requested layout direction.
final View embeddedView = platformView.getView();
if (embeddedView == null) {
throw new IllegalStateException(
"PlatformView#getView() returned null, but an Android view reference was expected.");
}
embeddedView.setLayoutDirection(request.direction);
platformViews.put(request.viewId, platformView);
maybeInvokeOnFlutterViewAttached(platformView);
return platformView;
}
// Configures the view for Hybrid Composition mode.
private void configureForHybridComposition(
@NonNull PlatformView platformView,
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
enforceMinimumAndroidApiVersion(19);
Log.i(TAG, "Using hybrid composition for platform view: " + request.viewId);
}
// Configures the view for Virtual Display mode, returning the associated texture ID.
private long configureForVirtualDisplay(
@NonNull PlatformView platformView,
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
// This mode adds the view to a virtual display, which is wired up to a GL texture that
// is composed by the Flutter engine.
// API level 20 is required to use VirtualDisplay#setSurface.
enforceMinimumAndroidApiVersion(20);
Log.i(TAG, "Hosting view in a virtual display for platform view: " + request.viewId);
final PlatformViewRenderTarget renderTarget = makePlatformViewRenderTarget(textureRegistry);
final int physicalWidth = toPhysicalPixels(request.logicalWidth);
final int physicalHeight = toPhysicalPixels(request.logicalHeight);
final VirtualDisplayController vdController =
VirtualDisplayController.create(
context,
accessibilityEventsDelegate,
platformView,
renderTarget,
physicalWidth,
physicalHeight,
request.viewId,
null,
(view, hasFocus) -> {
if (hasFocus) {
platformViewsChannel.invokeViewFocused(request.viewId);
}
});
if (vdController == null) {
throw new IllegalStateException(
"Failed creating virtual display for a "
+ request.viewType
+ " with id: "
+ request.viewId);
}
// The embedded view doesn't need to be sized in Virtual Display mode because the
// virtual display itself is sized.
vdControllers.put(request.viewId, vdController);
final View embeddedView = platformView.getView();
contextToEmbeddedView.put(embeddedView.getContext(), embeddedView);
return renderTarget.getId();
}
// Configures the view for Texture Layer Hybrid Composition mode, returning the associated
// texture ID.
@TargetApi(API_LEVELS.API_23)
@VisibleForTesting(otherwise = VisibleForTesting.PACKAGE_PRIVATE)
public long configureForTextureLayerComposition(
@NonNull PlatformView platformView,
@NonNull PlatformViewsChannel.PlatformViewCreationRequest request) {
    // This mode attaches the view to the Android view hierarchy and records its drawing
// operations, so they can be forwarded to a GL texture that is composed by the
// Flutter engine.
// API level 23 is required to use Surface#lockHardwareCanvas().
enforceMinimumAndroidApiVersion(23);
Log.i(TAG, "Hosting view in view hierarchy for platform view: " + request.viewId);
final int physicalWidth = toPhysicalPixels(request.logicalWidth);
final int physicalHeight = toPhysicalPixels(request.logicalHeight);
PlatformViewWrapper viewWrapper;
long textureId;
if (usesSoftwareRendering) {
viewWrapper = new PlatformViewWrapper(context);
textureId = -1;
} else {
final PlatformViewRenderTarget renderTarget = makePlatformViewRenderTarget(textureRegistry);
viewWrapper = new PlatformViewWrapper(context, renderTarget);
textureId = renderTarget.getId();
}
viewWrapper.setTouchProcessor(androidTouchProcessor);
viewWrapper.resizeRenderTarget(physicalWidth, physicalHeight);
final FrameLayout.LayoutParams viewWrapperLayoutParams =
new FrameLayout.LayoutParams(physicalWidth, physicalHeight);
// Size and position the view wrapper.
final int physicalTop = toPhysicalPixels(request.logicalTop);
final int physicalLeft = toPhysicalPixels(request.logicalLeft);
viewWrapperLayoutParams.topMargin = physicalTop;
viewWrapperLayoutParams.leftMargin = physicalLeft;
viewWrapper.setLayoutParams(viewWrapperLayoutParams);
// Size the embedded view.
final View embeddedView = platformView.getView();
embeddedView.setLayoutParams(new FrameLayout.LayoutParams(physicalWidth, physicalHeight));
// Accessibility in the embedded view is initially disabled because if a Flutter app
// disabled accessibility in the first frame, the embedding won't receive an update to
// disable accessibility since the embedding never received an update to enable it.
// The AccessibilityBridge keeps track of the accessibility nodes, and handles the deltas
// when the framework sends a new a11y tree to the embedding.
    // To prevent races, the framework populates the SemanticsNode after the platform view has
// been created.
embeddedView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS);
// Add the embedded view to the wrapper.
viewWrapper.addView(embeddedView);
// Listen for focus changed in any subview, so the framework is notified when the platform
// view is focused.
viewWrapper.setOnDescendantFocusChangeListener(
(v, hasFocus) -> {
if (hasFocus) {
platformViewsChannel.invokeViewFocused(request.viewId);
} else if (textInputPlugin != null) {
textInputPlugin.clearPlatformViewClient(request.viewId);
}
});
flutterView.addView(viewWrapper);
viewWrappers.append(request.viewId, viewWrapper);
maybeInvokeOnFlutterViewAttached(platformView);
return textureId;
}
@VisibleForTesting
public MotionEvent toMotionEvent(
float density, PlatformViewsChannel.PlatformViewTouch touch, boolean usingVirtualDiplay) {
MotionEventTracker.MotionEventId motionEventId =
MotionEventTracker.MotionEventId.from(touch.motionEventId);
MotionEvent trackedEvent = motionEventTracker.pop(motionEventId);
if (!usingVirtualDiplay && trackedEvent != null) {
// We have the original event, deliver it as it will pass the verifiable
// input check.
return trackedEvent;
}
// We are in virtual display mode or don't have a reference to the original MotionEvent.
// In this case we manually recreate a MotionEvent to be delivered. This MotionEvent
// will fail the verifiable input check.
    // Pointer coordinates in the tracked events are global to FlutterView, and the framework
    // converts them to be local to a widget. Given that motion events operate on local coords,
    // we need to replace these in the tracked event with their local counterparts.
PointerProperties[] pointerProperties =
parsePointerPropertiesList(touch.rawPointerPropertiesList)
.toArray(new PointerProperties[touch.pointerCount]);
PointerCoords[] pointerCoords =
parsePointerCoordsList(touch.rawPointerCoords, density)
.toArray(new PointerCoords[touch.pointerCount]);
// TODO (kaushikiska) : warn that we are potentially using an untracked
// event in the platform views.
return MotionEvent.obtain(
touch.downTime.longValue(),
touch.eventTime.longValue(),
touch.action,
touch.pointerCount,
pointerProperties,
pointerCoords,
touch.metaState,
touch.buttonState,
touch.xPrecision,
touch.yPrecision,
touch.deviceId,
touch.edgeFlags,
touch.source,
touch.flags);
}
public PlatformViewsController() {
registry = new PlatformViewRegistryImpl();
vdControllers = new HashMap<>();
accessibilityEventsDelegate = new AccessibilityEventsDelegate();
contextToEmbeddedView = new HashMap<>();
overlayLayerViews = new SparseArray<>();
currentFrameUsedOverlayLayerIds = new HashSet<>();
currentFrameUsedPlatformViewIds = new HashSet<>();
viewWrappers = new SparseArray<>();
platformViews = new SparseArray<>();
platformViewParent = new SparseArray<>();
motionEventTracker = MotionEventTracker.getInstance();
}
/**
* Attaches this platform views controller to its input and output channels.
*
* @param context The base context that will be passed to embedded views created by this
* controller. This should be the context of the Activity hosting the Flutter application.
* @param textureRegistry The texture registry which provides the output textures into which the
* embedded views will be rendered.
* @param dartExecutor The dart execution context, which is used to set up a system channel.
*/
public void attach(
@Nullable Context context,
@NonNull TextureRegistry textureRegistry,
@NonNull DartExecutor dartExecutor) {
if (this.context != null) {
throw new AssertionError(
"A PlatformViewsController can only be attached to a single output target.\n"
+ "attach was called while the PlatformViewsController was already attached.");
}
this.context = context;
this.textureRegistry = textureRegistry;
platformViewsChannel = new PlatformViewsChannel(dartExecutor);
platformViewsChannel.setPlatformViewsHandler(channelHandler);
}
/**
* Sets whether Flutter uses software rendering.
*
* <p>When software rendering is used, no GL context is available on the raster thread. When this
* is set to true, there's no Flutter composition of Android views and Flutter widgets since GL
* textures cannot be used.
*
* <p>Software rendering is only used for testing in emulators, and it should never be set to true
* in a production environment.
*
* @param useSoftwareRendering Whether software rendering is used.
*/
public void setSoftwareRendering(boolean useSoftwareRendering) {
usesSoftwareRendering = useSoftwareRendering;
}
/**
* Detaches this platform views controller.
*
   * <p>This is typically called when a Flutter application moves to run in the background, or is
   * destroyed. After calling this the platform views controller will no longer listen to its
* previous messenger, and will not maintain references to the texture registry, context, and
* messenger passed to the previous attach call.
*/
@UiThread
public void detach() {
if (platformViewsChannel != null) {
platformViewsChannel.setPlatformViewsHandler(null);
}
destroyOverlaySurfaces();
platformViewsChannel = null;
context = null;
textureRegistry = null;
}
/**
* Attaches the controller to a {@link FlutterView}.
*
* <p>When {@link io.flutter.embedding.android.FlutterFragment} is used, this method is called
* after the device rotates since the FlutterView is recreated after a rotation.
*/
public void attachToView(@NonNull FlutterView newFlutterView) {
flutterView = newFlutterView;
// Add wrapper for platform views that use GL texture.
for (int index = 0; index < viewWrappers.size(); index++) {
final PlatformViewWrapper view = viewWrappers.valueAt(index);
flutterView.addView(view);
}
// Add wrapper for platform views that are composed at the view hierarchy level.
for (int index = 0; index < platformViewParent.size(); index++) {
final FlutterMutatorView view = platformViewParent.valueAt(index);
flutterView.addView(view);
}
// Notify platform views that they are now attached to a FlutterView.
for (int index = 0; index < platformViews.size(); index++) {
final PlatformView view = platformViews.valueAt(index);
view.onFlutterViewAttached(flutterView);
}
}
/**
* Detaches the controller from {@link FlutterView}.
*
* <p>When {@link io.flutter.embedding.android.FlutterFragment} is used, this method is called
* when the device rotates since the FlutterView is detached from the fragment. The next time the
* fragment needs to be displayed, a new Flutter view is created, so attachToView is called again.
*/
public void detachFromView() {
// Remove wrapper for platform views that use GL texture.
for (int index = 0; index < viewWrappers.size(); index++) {
final PlatformViewWrapper view = viewWrappers.valueAt(index);
flutterView.removeView(view);
}
// Remove wrapper for platform views that are composed at the view hierarchy level.
for (int index = 0; index < platformViewParent.size(); index++) {
final FlutterMutatorView view = platformViewParent.valueAt(index);
flutterView.removeView(view);
}
destroyOverlaySurfaces();
removeOverlaySurfaces();
flutterView = null;
flutterViewConvertedToImageView = false;
    // Notify that the platform views have been detached from FlutterView.
for (int index = 0; index < platformViews.size(); index++) {
final PlatformView view = platformViews.valueAt(index);
view.onFlutterViewDetached();
}
}
private void maybeInvokeOnFlutterViewAttached(PlatformView view) {
if (flutterView == null) {
Log.i(TAG, "null flutterView");
// There is currently no FlutterView that we are attached to.
return;
}
view.onFlutterViewAttached(flutterView);
}
@Override
public void attachAccessibilityBridge(@NonNull AccessibilityBridge accessibilityBridge) {
accessibilityEventsDelegate.setAccessibilityBridge(accessibilityBridge);
}
@Override
public void detachAccessibilityBridge() {
accessibilityEventsDelegate.setAccessibilityBridge(null);
}
/**
* Attaches this controller to a text input plugin.
*
* <p>While a text input plugin is available, the platform views controller interacts with it to
* facilitate delegation of text input connections to platform views.
*
* <p>A platform views controller should be attached to a text input plugin whenever it is
* possible for the Flutter framework to receive text input.
*/
public void attachTextInputPlugin(@NonNull TextInputPlugin textInputPlugin) {
this.textInputPlugin = textInputPlugin;
}
/** Detaches this controller from the currently attached text input plugin. */
public void detachTextInputPlugin() {
textInputPlugin = null;
}
/**
* Returns true if Flutter should perform input connection proxying for the view.
*
   * <p>Returns true if the view is a platform view managed by this platform views controller.
   * Otherwise, if the view was created in a platform view's virtual display, delegates the
   * decision to the platform view's {@link View#checkInputConnectionProxy(View)} method.
   * Otherwise, returns false.
*/
public boolean checkInputConnectionProxy(@Nullable View view) {
// View can be null on some devices
// See: https://github.com/flutter/flutter/issues/36517
if (view == null) {
return false;
}
if (!contextToEmbeddedView.containsKey(view.getContext())) {
return false;
}
View platformView = contextToEmbeddedView.get(view.getContext());
if (platformView == view) {
return true;
}
return platformView.checkInputConnectionProxy(view);
}
public PlatformViewRegistry getRegistry() {
return registry;
}
/**
* Invoked when the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@link
* PlatformViewsController} attaches to JNI.
*/
public void onAttachedToJNI() {
// Currently no action needs to be taken after JNI attachment.
}
/**
* Invoked when the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@link
* PlatformViewsController} detaches from JNI.
*/
public void onDetachedFromJNI() {
diposeAllViews();
}
public void onPreEngineRestart() {
diposeAllViews();
}
@Override
@Nullable
public View getPlatformViewById(int viewId) {
if (usesVirtualDisplay(viewId)) {
final VirtualDisplayController controller = vdControllers.get(viewId);
return controller.getView();
}
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
return null;
}
return platformView.getView();
}
@Override
public boolean usesVirtualDisplay(int id) {
return vdControllers.containsKey(id);
}
private void lockInputConnection(@NonNull VirtualDisplayController controller) {
if (textInputPlugin == null) {
return;
}
textInputPlugin.lockPlatformViewInputConnection();
controller.onInputConnectionLocked();
}
private void unlockInputConnection(@NonNull VirtualDisplayController controller) {
if (textInputPlugin == null) {
return;
}
textInputPlugin.unlockPlatformViewInputConnection();
controller.onInputConnectionUnlocked();
}
private static PlatformViewRenderTarget makePlatformViewRenderTarget(
TextureRegistry textureRegistry) {
if (enableSurfaceProducerRenderTarget && Build.VERSION.SDK_INT >= API_LEVELS.API_29) {
final TextureRegistry.SurfaceProducer textureEntry = textureRegistry.createSurfaceProducer();
Log.i(TAG, "PlatformView is using SurfaceProducer backend");
return new SurfaceProducerPlatformViewRenderTarget(textureEntry);
}
if (enableImageRenderTarget && Build.VERSION.SDK_INT >= API_LEVELS.API_29) {
final TextureRegistry.ImageTextureEntry textureEntry = textureRegistry.createImageTexture();
Log.i(TAG, "PlatformView is using ImageReader backend");
return new ImageReaderPlatformViewRenderTarget(textureEntry);
}
final TextureRegistry.SurfaceTextureEntry textureEntry = textureRegistry.createSurfaceTexture();
Log.i(TAG, "PlatformView is using SurfaceTexture backend");
return new SurfaceTexturePlatformViewRenderTarget(textureEntry);
}
private static boolean validateDirection(int direction) {
return direction == View.LAYOUT_DIRECTION_LTR || direction == View.LAYOUT_DIRECTION_RTL;
}
@SuppressWarnings("unchecked")
private static List<PointerProperties> parsePointerPropertiesList(Object rawPropertiesList) {
List<Object> rawProperties = (List<Object>) rawPropertiesList;
List<PointerProperties> pointerProperties = new ArrayList<>();
for (Object o : rawProperties) {
pointerProperties.add(parsePointerProperties(o));
}
return pointerProperties;
}
@SuppressWarnings("unchecked")
private static PointerProperties parsePointerProperties(Object rawProperties) {
List<Object> propertiesList = (List<Object>) rawProperties;
PointerProperties properties = new MotionEvent.PointerProperties();
properties.id = (int) propertiesList.get(0);
properties.toolType = (int) propertiesList.get(1);
return properties;
}
@SuppressWarnings("unchecked")
private static List<PointerCoords> parsePointerCoordsList(Object rawCoordsList, float density) {
List<Object> rawCoords = (List<Object>) rawCoordsList;
List<PointerCoords> pointerCoords = new ArrayList<>();
for (Object o : rawCoords) {
pointerCoords.add(parsePointerCoords(o, density));
}
return pointerCoords;
}
@SuppressWarnings("unchecked")
private static PointerCoords parsePointerCoords(Object rawCoords, float density) {
List<Object> coordsList = (List<Object>) rawCoords;
PointerCoords coords = new MotionEvent.PointerCoords();
coords.orientation = (float) (double) coordsList.get(0);
coords.pressure = (float) (double) coordsList.get(1);
coords.size = (float) (double) coordsList.get(2);
coords.toolMajor = (float) ((double) coordsList.get(3) * density);
coords.toolMinor = (float) ((double) coordsList.get(4) * density);
coords.touchMajor = (float) ((double) coordsList.get(5) * density);
coords.touchMinor = (float) ((double) coordsList.get(6) * density);
coords.x = (float) ((double) coordsList.get(7) * density);
coords.y = (float) ((double) coordsList.get(8) * density);
return coords;
}
private float getDisplayDensity() {
return context.getResources().getDisplayMetrics().density;
}
private int toPhysicalPixels(double logicalPixels) {
return (int) Math.round(logicalPixels * getDisplayDensity());
}
private int toLogicalPixels(double physicalPixels, float displayDensity) {
return (int) Math.round(physicalPixels / displayDensity);
}
private int toLogicalPixels(double physicalPixels) {
return toLogicalPixels(physicalPixels, getDisplayDensity());
}
private void diposeAllViews() {
while (platformViews.size() > 0) {
final int viewId = platformViews.keyAt(0);
// Dispose deletes the entry from platformViews and clears associated resources.
channelHandler.dispose(viewId);
}
}
/**
   * Disposes a single platform view.
*
* @param viewId the PlatformView ID.
*/
@VisibleForTesting
public void disposePlatformView(int viewId) {
channelHandler.dispose(viewId);
}
private void initializeRootImageViewIfNeeded() {
if (synchronizeToNativeViewHierarchy && !flutterViewConvertedToImageView) {
flutterView.convertToImageView();
flutterViewConvertedToImageView = true;
}
}
/**
* Initializes a platform view and adds it to the view hierarchy.
*
* @param viewId The view ID. This member is not intended for public use, and is only visible for
* testing.
*/
@VisibleForTesting
void initializePlatformViewIfNeeded(int viewId) {
final PlatformView platformView = platformViews.get(viewId);
if (platformView == null) {
throw new IllegalStateException(
"Platform view hasn't been initialized from the platform view channel.");
}
if (platformViewParent.get(viewId) != null) {
return;
}
final View embeddedView = platformView.getView();
if (embeddedView == null) {
throw new IllegalStateException(
"PlatformView#getView() returned null, but an Android view reference was expected.");
}
if (embeddedView.getParent() != null) {
throw new IllegalStateException(
"The Android view returned from PlatformView#getView() was already added to a parent"
+ " view.");
}
final FlutterMutatorView parentView =
new FlutterMutatorView(
context, context.getResources().getDisplayMetrics().density, androidTouchProcessor);
parentView.setOnDescendantFocusChangeListener(
(view, hasFocus) -> {
if (hasFocus) {
platformViewsChannel.invokeViewFocused(viewId);
} else if (textInputPlugin != null) {
textInputPlugin.clearPlatformViewClient(viewId);
}
});
platformViewParent.put(viewId, parentView);
// Accessibility in the embedded view is initially disabled because if a Flutter app disabled
// accessibility in the first frame, the embedding won't receive an update to disable
// accessibility since the embedding never received an update to enable it.
// The AccessibilityBridge keeps track of the accessibility nodes, and handles the deltas when
// the framework sends a new a11y tree to the embedding.
// To prevent races, the framework populate the SemanticsNode after the platform view has been
// created.
embeddedView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS);
parentView.addView(embeddedView);
flutterView.addView(parentView);
}
public void attachToFlutterRenderer(@NonNull FlutterRenderer flutterRenderer) {
androidTouchProcessor = new AndroidTouchProcessor(flutterRenderer, /*trackMotionEvents=*/ true);
}
/**
   * Called when a platform view is displayed in the current frame.
*
* @param viewId The ID of the platform view.
* @param x The left position relative to {@code FlutterView}.
* @param y The top position relative to {@code FlutterView}.
* @param width The width of the platform view.
* @param height The height of the platform view.
* @param viewWidth The original width of the platform view before applying the mutator stack.
* @param viewHeight The original height of the platform view before applying the mutator stack.
* @param mutatorsStack The mutator stack. This member is not intended for public use, and is only
* visible for testing.
*/
public void onDisplayPlatformView(
int viewId,
int x,
int y,
int width,
int height,
int viewWidth,
int viewHeight,
@NonNull FlutterMutatorsStack mutatorsStack) {
initializeRootImageViewIfNeeded();
initializePlatformViewIfNeeded(viewId);
final FlutterMutatorView parentView = platformViewParent.get(viewId);
parentView.readyToDisplay(mutatorsStack, x, y, width, height);
parentView.setVisibility(View.VISIBLE);
parentView.bringToFront();
final FrameLayout.LayoutParams layoutParams =
new FrameLayout.LayoutParams(viewWidth, viewHeight);
final View view = platformViews.get(viewId).getView();
if (view != null) {
view.setLayoutParams(layoutParams);
view.bringToFront();
}
currentFrameUsedPlatformViewIds.add(viewId);
}
/**
* Called when an overlay surface is displayed in the current frame.
*
* @param id The ID of the surface.
* @param x The left position relative to {@code FlutterView}.
* @param y The top position relative to {@code FlutterView}.
* @param width The width of the surface.
* @param height The height of the surface. This member is not intended for public use, and is
* only visible for testing.
*/
public void onDisplayOverlaySurface(int id, int x, int y, int width, int height) {
if (overlayLayerViews.get(id) == null) {
throw new IllegalStateException("The overlay surface (id:" + id + ") doesn't exist");
}
initializeRootImageViewIfNeeded();
final PlatformOverlayView overlayView = overlayLayerViews.get(id);
if (overlayView.getParent() == null) {
flutterView.addView(overlayView);
}
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams((int) width, (int) height);
layoutParams.leftMargin = (int) x;
layoutParams.topMargin = (int) y;
overlayView.setLayoutParams(layoutParams);
overlayView.setVisibility(View.VISIBLE);
overlayView.bringToFront();
currentFrameUsedOverlayLayerIds.add(id);
}
public void onBeginFrame() {
currentFrameUsedOverlayLayerIds.clear();
currentFrameUsedPlatformViewIds.clear();
}
/**
* Called by {@code FlutterJNI} when the Flutter frame was submitted.
*
* <p>This member is not intended for public use, and is only visible for testing.
*/
public void onEndFrame() {
// If there are no platform views in the current frame,
// then revert the image view surface and use the previous surface.
//
// Otherwise, acquire the latest image.
if (flutterViewConvertedToImageView && currentFrameUsedPlatformViewIds.isEmpty()) {
flutterViewConvertedToImageView = false;
flutterView.revertImageView(
() -> {
// Destroy overlay surfaces once the surface reversion is completed.
finishFrame(false);
});
return;
}
// Whether the current frame was rendered using ImageReaders.
//
// Since the image readers may not have images available at this point,
// this becomes true if all the required surfaces have images available.
//
// This is used to decide if the platform views can be rendered in the current frame.
// If one of the surfaces doesn't have an image, the frame may be incomplete and must be
// dropped.
// For example, a toolbar widget painted by Flutter may not be rendered.
final boolean isFrameRenderedUsingImageReaders =
flutterViewConvertedToImageView && flutterView.acquireLatestImageViewFrame();
finishFrame(isFrameRenderedUsingImageReaders);
}
private void finishFrame(boolean isFrameRenderedUsingImageReaders) {
for (int i = 0; i < overlayLayerViews.size(); i++) {
final int overlayId = overlayLayerViews.keyAt(i);
final PlatformOverlayView overlayView = overlayLayerViews.valueAt(i);
if (currentFrameUsedOverlayLayerIds.contains(overlayId)) {
flutterView.attachOverlaySurfaceToRender(overlayView);
final boolean didAcquireOverlaySurfaceImage = overlayView.acquireLatestImage();
isFrameRenderedUsingImageReaders &= didAcquireOverlaySurfaceImage;
} else {
// If the background surface isn't rendered by the image view, then the
        // overlay surfaces can be detached from the renderer.
// This releases resources used by the ImageReader.
if (!flutterViewConvertedToImageView) {
overlayView.detachFromRenderer();
}
// Hide overlay surfaces that aren't rendered in the current frame.
overlayView.setVisibility(View.GONE);
flutterView.removeView(overlayView);
}
}
for (int i = 0; i < platformViewParent.size(); i++) {
final int viewId = platformViewParent.keyAt(i);
final View parentView = platformViewParent.get(viewId);
// This should only show platform views that are rendered in this frame and either:
// 1. Surface has images available in this frame or,
// 2. Surface does not have images available in this frame because the render surface should
// not be an ImageView.
//
// The platform view is appended to a mutator view.
//
      // Otherwise, hide the platform view, but don't remove it from the view hierarchy yet as
      // it is removed when the framework disposes the platform view widget.
if (currentFrameUsedPlatformViewIds.contains(viewId)
&& (isFrameRenderedUsingImageReaders || !synchronizeToNativeViewHierarchy)) {
parentView.setVisibility(View.VISIBLE);
} else {
parentView.setVisibility(View.GONE);
}
}
}
  /**
   * Creates and tracks the overlay surface.
   *
   * <p>This member is not intended for public use, and is only visible for testing.
   *
   * @param imageView The surface that displays the overlay.
   * @return Wrapper object that provides the layer id and the surface.
   */
@VisibleForTesting
@NonNull
public FlutterOverlaySurface createOverlaySurface(@NonNull PlatformOverlayView imageView) {
final int id = nextOverlayLayerId++;
overlayLayerViews.put(id, imageView);
return new FlutterOverlaySurface(id, imageView.getSurface());
}
/**
* Creates an overlay surface while the Flutter view is rendered by {@code PlatformOverlayView}.
*
* <p>This method is invoked by {@code FlutterJNI} only.
*
* <p>This member is not intended for public use, and is only visible for testing.
*/
@NonNull
public FlutterOverlaySurface createOverlaySurface() {
// Overlay surfaces have the same size as the background surface.
//
// This allows to reuse these surfaces in consecutive frames even
// if the drawings they contain have a different tight bound.
//
// The final view size is determined when its frame is set.
return createOverlaySurface(
new PlatformOverlayView(
flutterView.getContext(),
flutterView.getWidth(),
flutterView.getHeight(),
accessibilityEventsDelegate));
}
/**
* Destroys the overlay surfaces and removes them from the view hierarchy.
*
* <p>This method is used only internally by {@code FlutterJNI}.
*/
public void destroyOverlaySurfaces() {
for (int viewId = 0; viewId < overlayLayerViews.size(); viewId++) {
final PlatformOverlayView overlayView = overlayLayerViews.valueAt(viewId);
overlayView.detachFromRenderer();
overlayView.closeImageReader();
// Don't remove overlayView from the view hierarchy since this method can
// be called while the Android framework is iterating over the array of views.
// See ViewGroup#dispatchDetachedFromWindow(), and
// https://github.com/flutter/flutter/issues/97679.
}
}
private void removeOverlaySurfaces() {
if (flutterView == null) {
Log.e(TAG, "removeOverlaySurfaces called while flutter view is null");
return;
}
for (int viewId = 0; viewId < overlayLayerViews.size(); viewId++) {
flutterView.removeView(overlayLayerViews.valueAt(viewId));
}
overlayLayerViews.clear();
}
@VisibleForTesting
public SparseArray<PlatformOverlayView> getOverlayLayerViews() {
return overlayLayerViews;
}
}
| engine/shell/platform/android/io/flutter/plugin/platform/PlatformViewsController.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/platform/PlatformViewsController.java",
"repo_id": "engine",
"token_count": 19533
} | 342 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.view;
import static io.flutter.Build.API_LEVELS;
import android.annotation.SuppressLint;
import android.graphics.Rect;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcel;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityNodeProvider;
import android.view.accessibility.AccessibilityRecord;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import io.flutter.Log;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
* Facilitates embedding of platform views in the accessibility tree generated by the accessibility
* bridge.
*
* <p>Embedding is done by mirroring the accessibility tree of the platform view as a subtree of the
* flutter accessibility tree.
*
 * <p>This class relies on hidden system APIs to extract the accessibility information and does not
 * work starting Android P. If the reflection accessors are not available we fail silently by
 * embedding a null node; the app continues working, but the accessibility information for the
 * platform view will not be embedded.
*
* <p>We use the term `flutterId` for virtual accessibility node IDs in the FlutterView tree, and
* the term `originId` for the virtual accessibility node IDs in the platform view's tree.
* Internally this class maintains a bidirectional mapping between `flutterId`s and the
* corresponding platform view and `originId`.
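 * <p>For example (IDs are illustrative only), a platform view's virtual node with `originId` 7
 * might be exposed in the Flutter tree as `flutterId` 65; both directions of that mapping are
 * kept so that node lookups, events and actions can be routed either way.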
*/
@Keep
class AccessibilityViewEmbedder {
private static final String TAG = "AccessibilityBridge";
private final ReflectionAccessors reflectionAccessors;
  // The view in which the platform view is embedded; this is typically FlutterView.
private final View rootAccessibilityView;
// Maps a flutterId to the corresponding platform view and originId.
private final SparseArray<ViewAndId> flutterIdToOrigin;
// Maps a platform view and originId to a corresponding flutterID.
private final Map<ViewAndId, Integer> originToFlutterId;
  // Maps an embedded view to its screen bounds.
  // This is used to translate the coordinates of the accessibility node subtree to the main
  // display's coordinate system.
private final Map<View, Rect> embeddedViewToDisplayBounds;
private int nextFlutterId;
  AccessibilityViewEmbedder(@NonNull View rootAccessibilityView, int firstVirtualNodeId) {
    reflectionAccessors = new ReflectionAccessors();
    flutterIdToOrigin = new SparseArray<>();
    this.rootAccessibilityView = rootAccessibilityView;
nextFlutterId = firstVirtualNodeId;
originToFlutterId = new HashMap<>();
embeddedViewToDisplayBounds = new HashMap<>();
}
  /**
   * Returns the root accessibility node for an embedded platform view.
   *
   * @param embeddedView the embedded platform view whose accessibility tree is mirrored
   * @param flutterId the virtual accessibility ID for the node in flutter accessibility tree
   * @param displayBounds the display bounds for the node in screen coordinates
   */
public AccessibilityNodeInfo getRootNode(
@NonNull View embeddedView, int flutterId, @NonNull Rect displayBounds) {
AccessibilityNodeInfo originNode = embeddedView.createAccessibilityNodeInfo();
Long originPackedId = reflectionAccessors.getSourceNodeId(originNode);
if (originPackedId == null) {
return null;
}
embeddedViewToDisplayBounds.put(embeddedView, displayBounds);
int originId = ReflectionAccessors.getVirtualNodeId(originPackedId);
cacheVirtualIdMappings(embeddedView, originId, flutterId);
return convertToFlutterNode(originNode, flutterId, embeddedView);
}
/** Creates the accessibility node info for the node identified with `flutterId`. */
@Nullable
public AccessibilityNodeInfo createAccessibilityNodeInfo(int flutterId) {
ViewAndId origin = flutterIdToOrigin.get(flutterId);
if (origin == null) {
return null;
}
if (!embeddedViewToDisplayBounds.containsKey(origin.view)) {
      // This might happen if the embedded view sends an accessibility event before the first
      // Flutter semantics tree was sent to the accessibility bridge. In this case we don't return
      // a node as we do not know the bounds yet.
      // https://github.com/flutter/flutter/issues/30068
return null;
}
AccessibilityNodeProvider provider = origin.view.getAccessibilityNodeProvider();
if (provider == null) {
// The provider is null for views that don't have a virtual accessibility tree.
// We currently only support embedding virtual hierarchies in the Flutter tree.
// TODO(amirh): support embedding non virtual hierarchies.
// https://github.com/flutter/flutter/issues/29717
return null;
}
AccessibilityNodeInfo originNode =
origin.view.getAccessibilityNodeProvider().createAccessibilityNodeInfo(origin.id);
if (originNode == null) {
return null;
}
return convertToFlutterNode(originNode, flutterId, origin.view);
}
/*
   * Creates an AccessibilityNodeInfo that can be attached to the Flutter accessibility tree and is
   * equivalent to originNode (which belongs to embeddedView). The virtual ID for the created node
   * will be flutterId.
*/
@NonNull
private AccessibilityNodeInfo convertToFlutterNode(
@NonNull AccessibilityNodeInfo originNode, int flutterId, @NonNull View embeddedView) {
AccessibilityNodeInfo result = AccessibilityNodeInfo.obtain(rootAccessibilityView, flutterId);
result.setPackageName(rootAccessibilityView.getContext().getPackageName());
result.setSource(rootAccessibilityView, flutterId);
result.setClassName(originNode.getClassName());
Rect displayBounds = embeddedViewToDisplayBounds.get(embeddedView);
copyAccessibilityFields(originNode, result);
setFlutterNodesTranslateBounds(originNode, displayBounds, result);
addChildrenToFlutterNode(originNode, embeddedView, result);
setFlutterNodeParent(originNode, embeddedView, result);
return result;
}
private void setFlutterNodeParent(
@NonNull AccessibilityNodeInfo originNode,
@NonNull View embeddedView,
@NonNull AccessibilityNodeInfo result) {
Long parentOriginPackedId = reflectionAccessors.getParentNodeId(originNode);
if (parentOriginPackedId == null) {
return;
}
int parentOriginId = ReflectionAccessors.getVirtualNodeId(parentOriginPackedId);
Integer parentFlutterId = originToFlutterId.get(new ViewAndId(embeddedView, parentOriginId));
if (parentFlutterId != null) {
result.setParent(rootAccessibilityView, parentFlutterId);
}
}
private void addChildrenToFlutterNode(
@NonNull AccessibilityNodeInfo originNode,
@NonNull View embeddedView,
@NonNull AccessibilityNodeInfo resultNode) {
for (int i = 0; i < originNode.getChildCount(); i++) {
Long originPackedId = reflectionAccessors.getChildId(originNode, i);
if (originPackedId == null) {
continue;
}
int originId = ReflectionAccessors.getVirtualNodeId(originPackedId);
ViewAndId origin = new ViewAndId(embeddedView, originId);
int childFlutterId;
if (originToFlutterId.containsKey(origin)) {
childFlutterId = originToFlutterId.get(origin);
} else {
childFlutterId = nextFlutterId++;
cacheVirtualIdMappings(embeddedView, originId, childFlutterId);
}
resultNode.addChild(rootAccessibilityView, childFlutterId);
}
}
// Caches a bidirectional mapping of (embeddedView, originId)<-->flutterId.
// Where originId is a virtual node ID in the embeddedView's tree, and flutterId is the ID
// of the corresponding node in the Flutter virtual accessibility nodes tree.
private void cacheVirtualIdMappings(@NonNull View embeddedView, int originId, int flutterId) {
ViewAndId origin = new ViewAndId(embeddedView, originId);
originToFlutterId.put(origin, flutterId);
flutterIdToOrigin.put(flutterId, origin);
}
  // Suppressing deprecation warning for AccessibilityNodeInfo#getBoundsInParent and
  // AccessibilityNodeInfo#setBoundsInParent as we are copying the platform view's
  // accessibility node and we should not lose any available bounds information.
@SuppressWarnings("deprecation")
private void setFlutterNodesTranslateBounds(
@NonNull AccessibilityNodeInfo originNode,
@NonNull Rect displayBounds,
@NonNull AccessibilityNodeInfo resultNode) {
Rect boundsInParent = new Rect();
originNode.getBoundsInParent(boundsInParent);
resultNode.setBoundsInParent(boundsInParent);
Rect boundsInScreen = new Rect();
originNode.getBoundsInScreen(boundsInScreen);
boundsInScreen.offset(displayBounds.left, displayBounds.top);
resultNode.setBoundsInScreen(boundsInScreen);
}
private void copyAccessibilityFields(
@NonNull AccessibilityNodeInfo input, @NonNull AccessibilityNodeInfo output) {
output.setAccessibilityFocused(input.isAccessibilityFocused());
output.setCheckable(input.isCheckable());
output.setChecked(input.isChecked());
output.setContentDescription(input.getContentDescription());
output.setEnabled(input.isEnabled());
output.setClickable(input.isClickable());
output.setFocusable(input.isFocusable());
output.setFocused(input.isFocused());
output.setLongClickable(input.isLongClickable());
output.setMovementGranularities(input.getMovementGranularities());
output.setPassword(input.isPassword());
output.setScrollable(input.isScrollable());
output.setSelected(input.isSelected());
output.setText(input.getText());
output.setVisibleToUser(input.isVisibleToUser());
output.setEditable(input.isEditable());
output.setCanOpenPopup(input.canOpenPopup());
output.setCollectionInfo(input.getCollectionInfo());
output.setCollectionItemInfo(input.getCollectionItemInfo());
output.setContentInvalid(input.isContentInvalid());
output.setDismissable(input.isDismissable());
output.setInputType(input.getInputType());
output.setLiveRegion(input.getLiveRegion());
output.setMultiLine(input.isMultiLine());
output.setRangeInfo(input.getRangeInfo());
output.setError(input.getError());
output.setMaxTextLength(input.getMaxTextLength());
if (Build.VERSION.SDK_INT >= API_LEVELS.API_23) {
output.setContextClickable(input.isContextClickable());
// TODO(amirh): copy traversal before and after.
// https://github.com/flutter/flutter/issues/29718
}
if (Build.VERSION.SDK_INT >= API_LEVELS.API_24) {
output.setDrawingOrder(input.getDrawingOrder());
output.setImportantForAccessibility(input.isImportantForAccessibility());
}
if (Build.VERSION.SDK_INT >= API_LEVELS.API_26) {
output.setAvailableExtraData(input.getAvailableExtraData());
output.setHintText(input.getHintText());
output.setShowingHintText(input.isShowingHintText());
}
}
/**
* Delegates an AccessibilityNodeProvider#requestSendAccessibilityEvent from the
* AccessibilityBridge to the embedded view.
*
* @return True if the event was sent.
*/
public boolean requestSendAccessibilityEvent(
@NonNull View embeddedView, @NonNull View eventOrigin, @NonNull AccessibilityEvent event) {
AccessibilityEvent translatedEvent = AccessibilityEvent.obtain(event);
Long originPackedId = reflectionAccessors.getRecordSourceNodeId(event);
if (originPackedId == null) {
return false;
}
int originVirtualId = ReflectionAccessors.getVirtualNodeId(originPackedId);
Integer flutterId = originToFlutterId.get(new ViewAndId(embeddedView, originVirtualId));
if (flutterId == null) {
flutterId = nextFlutterId++;
cacheVirtualIdMappings(embeddedView, originVirtualId, flutterId);
}
translatedEvent.setSource(rootAccessibilityView, flutterId);
translatedEvent.setClassName(event.getClassName());
translatedEvent.setPackageName(event.getPackageName());
for (int i = 0; i < translatedEvent.getRecordCount(); i++) {
AccessibilityRecord record = translatedEvent.getRecord(i);
Long recordOriginPackedId = reflectionAccessors.getRecordSourceNodeId(record);
if (recordOriginPackedId == null) {
return false;
}
int recordOriginVirtualID = ReflectionAccessors.getVirtualNodeId(recordOriginPackedId);
ViewAndId originViewAndId = new ViewAndId(embeddedView, recordOriginVirtualID);
if (!originToFlutterId.containsKey(originViewAndId)) {
return false;
}
int recordFlutterId = originToFlutterId.get(originViewAndId);
record.setSource(rootAccessibilityView, recordFlutterId);
}
return rootAccessibilityView
.getParent()
.requestSendAccessibilityEvent(eventOrigin, translatedEvent);
}
/**
* Delegates an @{link AccessibilityNodeProvider#performAction} from the AccessibilityBridge to
* the embedded view's accessibility node provider.
*
* @return True if the action was performed.
*/
public boolean performAction(int flutterId, int accessibilityAction, @Nullable Bundle arguments) {
ViewAndId origin = flutterIdToOrigin.get(flutterId);
if (origin == null) {
return false;
}
View embeddedView = origin.view;
AccessibilityNodeProvider provider = embeddedView.getAccessibilityNodeProvider();
if (provider == null) {
return false;
}
return provider.performAction(origin.id, accessibilityAction, arguments);
}
/**
* Returns a flutterID for an accessibility record, or null if no mapping exists.
*
* @param embeddedView the embedded view that the record is associated with.
*/
@Nullable
public Integer getRecordFlutterId(
@NonNull View embeddedView, @NonNull AccessibilityRecord record) {
Long originPackedId = reflectionAccessors.getRecordSourceNodeId(record);
if (originPackedId == null) {
return null;
}
int originVirtualId = ReflectionAccessors.getVirtualNodeId(originPackedId);
return originToFlutterId.get(new ViewAndId(embeddedView, originVirtualId));
}
/**
* Delegates a View#onHoverEvent event from the AccessibilityBridge to an embedded view.
*
* <p>The pointer coordinates are translated to the embedded view's coordinate system.
*/
public boolean onAccessibilityHoverEvent(int rootFlutterId, @NonNull MotionEvent event) {
ViewAndId origin = flutterIdToOrigin.get(rootFlutterId);
if (origin == null) {
return false;
}
Rect displayBounds = embeddedViewToDisplayBounds.get(origin.view);
int pointerCount = event.getPointerCount();
MotionEvent.PointerProperties[] pointerProperties =
new MotionEvent.PointerProperties[pointerCount];
MotionEvent.PointerCoords[] pointerCoords = new MotionEvent.PointerCoords[pointerCount];
for (int i = 0; i < event.getPointerCount(); i++) {
pointerProperties[i] = new MotionEvent.PointerProperties();
event.getPointerProperties(i, pointerProperties[i]);
MotionEvent.PointerCoords originCoords = new MotionEvent.PointerCoords();
event.getPointerCoords(i, originCoords);
pointerCoords[i] = new MotionEvent.PointerCoords(originCoords);
pointerCoords[i].x -= displayBounds.left;
pointerCoords[i].y -= displayBounds.top;
}
MotionEvent translatedEvent =
MotionEvent.obtain(
event.getDownTime(),
event.getEventTime(),
event.getAction(),
event.getPointerCount(),
pointerProperties,
pointerCoords,
event.getMetaState(),
event.getButtonState(),
event.getXPrecision(),
event.getYPrecision(),
event.getDeviceId(),
event.getEdgeFlags(),
event.getSource(),
event.getFlags());
return origin.view.dispatchGenericMotionEvent(translatedEvent);
}
/**
* Returns the View that contains the accessibility node identified by the provided flutterId or
* null if it doesn't belong to a view.
*/
public View platformViewOfNode(int flutterId) {
ViewAndId viewAndId = flutterIdToOrigin.get(flutterId);
if (viewAndId == null) {
return null;
}
return viewAndId.view;
}
private static class ViewAndId {
final View view;
final int id;
private ViewAndId(View view, int id) {
this.view = view;
this.id = id;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ViewAndId)) return false;
ViewAndId viewAndId = (ViewAndId) o;
return id == viewAndId.id && view.equals(viewAndId.view);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + view.hashCode();
result = prime * result + id;
return result;
}
}
private static class ReflectionAccessors {
private @Nullable final Method getSourceNodeId;
private @Nullable final Method getParentNodeId;
private @Nullable final Method getRecordSourceNodeId;
private @Nullable final Method getChildId;
private @Nullable final Field childNodeIdsField;
private @Nullable final Method longArrayGetIndex;
@SuppressLint("DiscouragedPrivateApi,PrivateApi")
private ReflectionAccessors() {
Method getSourceNodeId = null;
Method getParentNodeId = null;
Method getRecordSourceNodeId = null;
Method getChildId = null;
Field childNodeIdsField = null;
Method longArrayGetIndex = null;
try {
getSourceNodeId = AccessibilityNodeInfo.class.getMethod("getSourceNodeId");
} catch (NoSuchMethodException e) {
Log.w(TAG, "can't invoke AccessibilityNodeInfo#getSourceNodeId with reflection");
}
try {
getRecordSourceNodeId = AccessibilityRecord.class.getMethod("getSourceNodeId");
} catch (NoSuchMethodException e) {
Log.w(TAG, "can't invoke AccessibiiltyRecord#getSourceNodeId with reflection");
}
      // Reflection access is not allowed starting Android P (API 28) on these methods.
      if (Build.VERSION.SDK_INT <= API_LEVELS.API_27) {
try {
getParentNodeId = AccessibilityNodeInfo.class.getMethod("getParentNodeId");
} catch (NoSuchMethodException e) {
Log.w(TAG, "can't invoke getParentNodeId with reflection");
}
// Starting P we extract the child id from the mChildNodeIds field (see getChildId
// below).
try {
getChildId = AccessibilityNodeInfo.class.getMethod("getChildId", int.class);
} catch (NoSuchMethodException e) {
Log.w(TAG, "can't invoke getChildId with reflection");
}
} else {
try {
childNodeIdsField = AccessibilityNodeInfo.class.getDeclaredField("mChildNodeIds");
childNodeIdsField.setAccessible(true);
          // The private field's type is a private Android utility class (LongArray), so we also
          // need reflection to actually read the data out of it.
longArrayGetIndex = Class.forName("android.util.LongArray").getMethod("get", int.class);
} catch (NoSuchFieldException
| ClassNotFoundException
| NoSuchMethodException
| NullPointerException e) {
Log.w(TAG, "can't access childNodeIdsField with reflection");
childNodeIdsField = null;
}
}
this.getSourceNodeId = getSourceNodeId;
this.getParentNodeId = getParentNodeId;
this.getRecordSourceNodeId = getRecordSourceNodeId;
this.getChildId = getChildId;
this.childNodeIdsField = childNodeIdsField;
this.longArrayGetIndex = longArrayGetIndex;
}
/** Returns virtual node ID given packed node ID used internally in accessibility API. */
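    // The packed 64-bit node ID keeps the virtual descendant ID in its upper 32 bits (see
    // AccessibilityNodeInfo#makeNodeId in AOSP), so shifting right by 32 extracts it.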
private static int getVirtualNodeId(long nodeId) {
return (int) (nodeId >> 32);
}
@Nullable
private Long getSourceNodeId(@NonNull AccessibilityNodeInfo node) {
if (getSourceNodeId == null) {
return null;
}
try {
return (Long) getSourceNodeId.invoke(node);
} catch (IllegalAccessException e) {
Log.w(TAG, "Failed to access getSourceNodeId method.", e);
} catch (InvocationTargetException e) {
Log.w(TAG, "The getSourceNodeId method threw an exception when invoked.", e);
}
return null;
}
@Nullable
private Long getChildId(@NonNull AccessibilityNodeInfo node, int child) {
if (getChildId == null && (childNodeIdsField == null || longArrayGetIndex == null)) {
return null;
}
if (getChildId != null) {
try {
return (Long) getChildId.invoke(node, child);
// Using identical separate catch blocks to comply with the following lint:
// Error: Multi-catch with these reflection exceptions requires API level 19
// (current min is 16) because they get compiled to the common but new super
// type ReflectiveOperationException. As a workaround either create individual
// catch statements, or catch Exception. [NewApi]
} catch (IllegalAccessException e) {
Log.w(TAG, "Failed to access getChildId method.", e);
} catch (InvocationTargetException e) {
Log.w(TAG, "The getChildId method threw an exception when invoked.", e);
}
} else {
try {
return (long) longArrayGetIndex.invoke(childNodeIdsField.get(node), child);
// Using identical separate catch blocks to comply with the following lint:
// Error: Multi-catch with these reflection exceptions requires API level 19
// (current min is 16) because they get compiled to the common but new super
// type ReflectiveOperationException. As a workaround either create individual
// catch statements, or catch Exception. [NewApi]
} catch (IllegalAccessException e) {
Log.w(TAG, "Failed to access longArrayGetIndex method or the childNodeId field.", e);
} catch (InvocationTargetException | ArrayIndexOutOfBoundsException e) {
Log.w(TAG, "The longArrayGetIndex method threw an exception when invoked.", e);
}
}
return null;
}
@Nullable
private Long getParentNodeId(@NonNull AccessibilityNodeInfo node) {
if (getParentNodeId != null) {
try {
return (long) getParentNodeId.invoke(node);
// Using identical separate catch blocks to comply with the following lint:
// Error: Multi-catch with these reflection exceptions requires API level 19
// (current min is 16) because they get compiled to the common but new super
// type ReflectiveOperationException. As a workaround either create individual
// catch statements, or catch Exception. [NewApi]
} catch (IllegalAccessException e) {
Log.w(TAG, "Failed to access getParentNodeId method.", e);
} catch (InvocationTargetException e) {
Log.w(TAG, "The getParentNodeId method threw an exception when invoked.", e);
}
}
      // Fall back on reading the ID from the serialized parcel data if we absolutely have to.
return yoinkParentIdFromParcel(node);
}
// If this looks like it's failing, that's because it probably is. This method is relying on
// the implementation details of `AccessibilityNodeInfo#writeToParcel` in order to find the
// particular bit in the opaque parcel that represents mParentNodeId. If the implementation
// details change from our assumptions in this method, this will silently break.
@Nullable
private static Long yoinkParentIdFromParcel(AccessibilityNodeInfo node) {
if (Build.VERSION.SDK_INT < API_LEVELS.API_26) {
Log.w(TAG, "Unexpected Android version. Unable to find the parent ID.");
return null;
}
// We're creating a copy here because writing a node to a parcel recycles it. Objects
// are passed by reference in Java. So even though this method doesn't seem to use the
// node again, it's really used in other methods that would throw exceptions if we
// recycle it here.
AccessibilityNodeInfo copy = AccessibilityNodeInfo.obtain(node);
final Parcel parcel = Parcel.obtain();
parcel.setDataPosition(0);
copy.writeToParcel(parcel, /*flags=*/ 0);
Long parentNodeId = null;
// Match the internal logic that sets where mParentId actually ends up finally living.
// This logic should match
// https://android.googlesource.com/platform/frameworks/base/+/0b5ca24a4/core/java/android/view/accessibility/AccessibilityNodeInfo.java#3524.
parcel.setDataPosition(0);
long nonDefaultFields = parcel.readLong();
int fieldIndex = 0;
if (isBitSet(nonDefaultFields, fieldIndex++)) {
parcel.readInt(); // mIsSealed
}
if (isBitSet(nonDefaultFields, fieldIndex++)) {
parcel.readLong(); // mSourceNodeId
}
if (isBitSet(nonDefaultFields, fieldIndex++)) {
parcel.readInt(); // mWindowId
}
if (isBitSet(nonDefaultFields, fieldIndex++)) {
parentNodeId = parcel.readLong();
}
parcel.recycle();
return parentNodeId;
}
private static boolean isBitSet(long flags, int bitIndex) {
return (flags & (1L << bitIndex)) != 0;
}
@Nullable
private Long getRecordSourceNodeId(@NonNull AccessibilityRecord node) {
if (getRecordSourceNodeId == null) {
return null;
}
try {
return (Long) getRecordSourceNodeId.invoke(node);
} catch (IllegalAccessException e) {
Log.w(TAG, "Failed to access the getRecordSourceNodeId method.", e);
} catch (InvocationTargetException e) {
Log.w(TAG, "The getRecordSourceNodeId method threw an exception when invoked.", e);
}
return null;
}
}
}
| engine/shell/platform/android/io/flutter/view/AccessibilityViewEmbedder.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/view/AccessibilityViewEmbedder.java",
"repo_id": "engine",
"token_count": 8963
} | 343 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/platform_message_response_android.h"
#include <utility>
#include "flutter/fml/make_copyable.h"
#include "flutter/shell/platform/android/jni/platform_view_android_jni.h"
namespace flutter {
PlatformMessageResponseAndroid::PlatformMessageResponseAndroid(
int response_id,
std::shared_ptr<PlatformViewAndroidJNI> jni_facade,
fml::RefPtr<fml::TaskRunner> platform_task_runner)
: response_id_(response_id),
jni_facade_(std::move(jni_facade)),
platform_task_runner_(std::move(platform_task_runner)) {}
PlatformMessageResponseAndroid::~PlatformMessageResponseAndroid() = default;
// |flutter::PlatformMessageResponse|
void PlatformMessageResponseAndroid::Complete(
std::unique_ptr<fml::Mapping> data) {
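  // The reply calls into Java through the JNI facade, which must happen on the
  // platform thread, so the work is posted to the platform task runner.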
platform_task_runner_->PostTask(
fml::MakeCopyable([response_id = response_id_, //
data = std::move(data), //
jni_facade = jni_facade_]() mutable {
jni_facade->FlutterViewHandlePlatformMessageResponse(response_id,
std::move(data));
}));
}
// |flutter::PlatformMessageResponse|
void PlatformMessageResponseAndroid::CompleteEmpty() {
platform_task_runner_->PostTask(
fml::MakeCopyable([response_id = response_id_, //
jni_facade = jni_facade_ //
]() {
// Make the response call into Java.
jni_facade->FlutterViewHandlePlatformMessageResponse(response_id,
nullptr);
}));
}
} // namespace flutter
| engine/shell/platform/android/platform_message_response_android.cc/0 | {
"file_path": "engine/shell/platform/android/platform_message_response_android.cc",
"repo_id": "engine",
"token_count": 748
} | 344 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/surface/android_surface_mock.h"
namespace flutter {
std::unique_ptr<GLContextResult> AndroidSurfaceMock::GLContextMakeCurrent() {
return std::make_unique<GLContextDefaultResult>(/*static_result=*/true);
}
bool AndroidSurfaceMock::GLContextClearCurrent() {
return true;
}
bool AndroidSurfaceMock::GLContextPresent(const GLPresentInfo& present_info) {
return true;
}
GLFBOInfo AndroidSurfaceMock::GLContextFBO(GLFrameInfo frame_info) const {
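  // The mock reports the default window-bound framebuffer (ID 0).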
return GLFBOInfo{
.fbo_id = 0,
};
}
} // namespace flutter
| engine/shell/platform/android/surface/android_surface_mock.cc/0 | {
"file_path": "engine/shell/platform/android/surface/android_surface_mock.cc",
"repo_id": "engine",
"token_count": 234
} | 345 |
package io.flutter.embedding.android;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.lifecycle.Lifecycle;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterEngineCache;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.FlutterShellArgs;
import io.flutter.embedding.engine.loader.FlutterLoader;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.embedding.engine.systemchannels.LifecycleChannel;
import io.flutter.plugin.platform.PlatformPlugin;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.Robolectric;
import org.robolectric.android.controller.ActivityController;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class FlutterAndroidComponentTest {
private final Context ctx = ApplicationProvider.getApplicationContext();
@Test
public void pluginsReceiveFlutterPluginBinding() {
// ---- Test setup ----
// Place a FlutterEngine in the static cache.
FlutterLoader mockFlutterLoader = mock(FlutterLoader.class);
FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
when(mockFlutterJni.isAttached()).thenReturn(true);
FlutterEngine cachedEngine = spy(new FlutterEngine(ctx, mockFlutterLoader, mockFlutterJni));
FlutterEngineCache.getInstance().put("my_flutter_engine", cachedEngine);
// Add mock plugin.
FlutterPlugin mockPlugin = mock(FlutterPlugin.class);
cachedEngine.getPlugins().add(mockPlugin);
// Create a fake Host, which is required by the delegate.
FakeHost fakeHost = new FakeHost(cachedEngine);
fakeHost.shouldDestroyEngineWithHost = true;
// Create the real object that we're testing.
FlutterActivityAndFragmentDelegate delegate = new FlutterActivityAndFragmentDelegate(fakeHost);
// --- Execute the behavior under test ---
// Push the delegate through all lifecycle methods all the way to destruction.
delegate.onAttach(ctx);
// Verify that the plugin is attached to the FlutterEngine.
ArgumentCaptor<FlutterPlugin.FlutterPluginBinding> pluginBindingCaptor =
ArgumentCaptor.forClass(FlutterPlugin.FlutterPluginBinding.class);
verify(mockPlugin, times(1)).onAttachedToEngine(pluginBindingCaptor.capture());
FlutterPlugin.FlutterPluginBinding binding = pluginBindingCaptor.getValue();
assertNotNull(binding.getApplicationContext());
assertNotNull(binding.getBinaryMessenger());
assertNotNull(binding.getTextureRegistry());
assertNotNull(binding.getPlatformViewRegistry());
delegate.onRestoreInstanceState(null);
delegate.onCreateView(null, null, null, 0, true);
delegate.onStart();
delegate.onResume();
delegate.onPause();
delegate.onStop();
delegate.onDestroyView();
delegate.onDetach();
// Verify the plugin was detached from the FlutterEngine.
pluginBindingCaptor = ArgumentCaptor.forClass(FlutterPlugin.FlutterPluginBinding.class);
verify(mockPlugin, times(1)).onDetachedFromEngine(pluginBindingCaptor.capture());
binding = pluginBindingCaptor.getValue();
assertNotNull(binding.getApplicationContext());
assertNotNull(binding.getBinaryMessenger());
assertNotNull(binding.getTextureRegistry());
assertNotNull(binding.getPlatformViewRegistry());
}
@Test
public void activityAwarePluginsReceiveActivityBinding() {
// ---- Test setup ----
// Place a FlutterEngine in the static cache.
FlutterLoader mockFlutterLoader = mock(FlutterLoader.class);
FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
when(mockFlutterJni.isAttached()).thenReturn(true);
FlutterEngine cachedEngine = spy(new FlutterEngine(ctx, mockFlutterLoader, mockFlutterJni));
FlutterEngineCache.getInstance().put("my_flutter_engine", cachedEngine);
// Add mock plugin.
FlutterPlugin mockPlugin =
mock(FlutterPlugin.class, withSettings().extraInterfaces(ActivityAware.class));
ActivityAware activityAwarePlugin = (ActivityAware) mockPlugin;
ActivityPluginBinding.OnSaveInstanceStateListener mockSaveStateListener =
mock(ActivityPluginBinding.OnSaveInstanceStateListener.class);
// Add a OnSaveStateListener when the Activity plugin binding is made available.
doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
ActivityPluginBinding binding =
(ActivityPluginBinding) invocation.getArguments()[0];
binding.addOnSaveStateListener(mockSaveStateListener);
return null;
}
})
.when(activityAwarePlugin)
.onAttachedToActivity(any(ActivityPluginBinding.class));
cachedEngine.getPlugins().add(mockPlugin);
// Create a fake Host, which is required by the delegate.
FlutterActivityAndFragmentDelegate.Host fakeHost = new FakeHost(cachedEngine);
FlutterActivityAndFragmentDelegate delegate = new FlutterActivityAndFragmentDelegate(fakeHost);
// --- Execute the behavior under test ---
// Push the delegate through all lifecycle methods all the way to destruction.
delegate.onAttach(ctx);
// Verify plugin was given an ActivityPluginBinding.
ArgumentCaptor<ActivityPluginBinding> pluginBindingCaptor =
ArgumentCaptor.forClass(ActivityPluginBinding.class);
verify(activityAwarePlugin, times(1)).onAttachedToActivity(pluginBindingCaptor.capture());
ActivityPluginBinding binding = pluginBindingCaptor.getValue();
assertNotNull(binding.getActivity());
assertNotNull(binding.getLifecycle());
delegate.onRestoreInstanceState(null);
// Verify that after Activity creation, the plugin was allowed to restore state.
verify(mockSaveStateListener, times(1)).onRestoreInstanceState(isNull());
delegate.onCreateView(null, null, null, 0, true);
delegate.onStart();
delegate.onResume();
delegate.onPause();
delegate.onStop();
delegate.onSaveInstanceState(mock(Bundle.class));
// Verify that the plugin was allowed to save state.
verify(mockSaveStateListener, times(1)).onSaveInstanceState(any(Bundle.class));
delegate.onDestroyView();
delegate.onDetach();
// Verify that the plugin was detached from the Activity.
verify(activityAwarePlugin, times(1)).onDetachedFromActivity();
}
@Test
public void normalLifecycleStepsDoNotTriggerADetachFromFlutterEngine() {
// ---- Test setup ----
// Place a FlutterEngine in the static cache.
FlutterLoader mockFlutterLoader = mock(FlutterLoader.class);
FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
when(mockFlutterJni.isAttached()).thenReturn(true);
FlutterEngine cachedEngine = spy(new FlutterEngine(ctx, mockFlutterLoader, mockFlutterJni));
FlutterEngineCache.getInstance().put("my_flutter_engine", cachedEngine);
// Create a fake Host, which is required by the delegate.
FakeHost fakeHost = new FakeHost(cachedEngine);
// Create the real object that we're testing.
FlutterActivityAndFragmentDelegate delegate =
spy(new FlutterActivityAndFragmentDelegate(fakeHost));
// --- Execute the behavior under test ---
// Push the delegate through all lifecycle methods all the way to destruction.
delegate.onAttach(ctx);
delegate.onRestoreInstanceState(null);
delegate.onCreateView(null, null, null, 0, true);
delegate.onStart();
delegate.onResume();
delegate.onPause();
delegate.onStop();
delegate.onDestroyView();
delegate.onDetach();
verify(delegate, never()).detachFromFlutterEngine();
}
@Test
public void twoOverlappingFlutterActivitiesDoNotCrosstalk() {
// ---- Test setup ----
// Place a FlutterEngine in the static cache.
FlutterLoader mockFlutterLoader = mock(FlutterLoader.class);
FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
when(mockFlutterJni.isAttached()).thenReturn(true);
FlutterEngine cachedEngine = spy(new FlutterEngine(ctx, mockFlutterLoader, mockFlutterJni));
FlutterEngineCache.getInstance().put("my_flutter_engine", cachedEngine);
LifecycleChannel mockLifecycleChannel = mock(LifecycleChannel.class);
when(cachedEngine.getLifecycleChannel()).thenReturn(mockLifecycleChannel);
Intent intent = FlutterActivity.withCachedEngine("my_flutter_engine").build(ctx);
ActivityController<FlutterActivity> activityController1 =
Robolectric.buildActivity(FlutterActivity.class, intent);
activityController1.create().start().resume();
InOrder inOrder = inOrder(mockLifecycleChannel);
inOrder.verify(mockLifecycleChannel, times(1)).appIsResumed();
verifyNoMoreInteractions(mockLifecycleChannel);
activityController1.pause();
// Create a second instance on the same engine and start running it as well.
ActivityController<FlutterActivity> activityController2 =
Robolectric.buildActivity(FlutterActivity.class, intent);
activityController2.create().start().resume();
// From the onPause of the first activity.
inOrder.verify(mockLifecycleChannel, times(1)).appIsInactive();
// By creating the second activity, we should automatically detach the first activity.
inOrder.verify(mockLifecycleChannel, times(1)).appIsDetached();
// In order, the second activity then is resumed.
inOrder.verify(mockLifecycleChannel, times(1)).appIsResumed();
verifyNoMoreInteractions(mockLifecycleChannel);
// The first activity goes through the normal lifecycles of destruction, but since we
// detached the first activity during the second activity's creation, we should ignore the
// first activity's destruction events to avoid crosstalk.
activityController1.stop().destroy();
verifyNoMoreInteractions(mockLifecycleChannel);
}
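  // A minimal Host implementation that hands the cached engine to the delegate; only the pieces
  // exercised by these tests are given non-trivial behavior.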
private static class FakeHost implements FlutterActivityAndFragmentDelegate.Host {
final FlutterEngine cachedEngine;
Activity activity;
boolean shouldDestroyEngineWithHost = false;
Lifecycle lifecycle = mock(Lifecycle.class);
private FakeHost(@NonNull FlutterEngine flutterEngine) {
cachedEngine = flutterEngine;
}
@NonNull
@Override
public Context getContext() {
return ApplicationProvider.getApplicationContext();
}
@SuppressWarnings("deprecation")
// Robolectric.setupActivity
// TODO(reidbaker): https://github.com/flutter/flutter/issues/133151
@Nullable
@Override
public Activity getActivity() {
if (activity == null) {
activity = Robolectric.setupActivity(Activity.class);
}
return activity;
}
@NonNull
@Override
public Lifecycle getLifecycle() {
return lifecycle;
}
@NonNull
@Override
public FlutterShellArgs getFlutterShellArgs() {
return new FlutterShellArgs(new String[] {});
}
@Nullable
@Override
public String getCachedEngineId() {
return "my_flutter_engine";
}
@Nullable
@Override
public String getCachedEngineGroupId() {
return "my_flutter_engine_group";
}
@Override
public boolean shouldDestroyEngineWithHost() {
return shouldDestroyEngineWithHost;
}
@NonNull
@Override
public String getDartEntrypointFunctionName() {
return "main";
}
@Nullable
@Override
public String getDartEntrypointLibraryUri() {
return null;
}
@Nullable
@Override
public List<String> getDartEntrypointArgs() {
return null;
}
@NonNull
@Override
public String getAppBundlePath() {
return "/fake/path";
}
@Nullable
@Override
public String getInitialRoute() {
return "/";
}
@NonNull
@Override
public RenderMode getRenderMode() {
return RenderMode.surface;
}
@NonNull
@Override
public TransparencyMode getTransparencyMode() {
return TransparencyMode.transparent;
}
@Override
public ExclusiveAppComponent<Activity> getExclusiveAppComponent() {
return null;
}
@Nullable
@Override
public FlutterEngine provideFlutterEngine(@NonNull Context context) {
return cachedEngine;
}
@Nullable
@Override
public PlatformPlugin providePlatformPlugin(
@Nullable Activity activity, @NonNull FlutterEngine flutterEngine) {
return null;
}
@Override
public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {}
@Override
public void cleanUpFlutterEngine(@NonNull FlutterEngine flutterEngine) {}
@Override
public boolean shouldAttachEngineToActivity() {
return true;
}
@Override
public boolean shouldHandleDeeplinking() {
return false;
}
@Override
public boolean shouldRestoreAndSaveState() {
return true;
}
@Override
public boolean shouldDispatchAppLifecycleState() {
return true;
}
@Override
public boolean attachToEngineAutomatically() {
return true;
}
@Override
public void onFlutterSurfaceViewCreated(@NonNull FlutterSurfaceView flutterSurfaceView) {}
@Override
public void onFlutterTextureViewCreated(@NonNull FlutterTextureView flutterTextureView) {}
@Override
public void onFlutterUiDisplayed() {}
@Override
public void onFlutterUiNoLongerDisplayed() {}
@Override
public void detachFromFlutterEngine() {}
@Override
public void updateSystemUiOverlays() {}
@Override
public boolean popSystemNavigator() {
return false;
}
@Override
public void setFrameworkHandlesBack(boolean frameworkHandlesBack) {}
}
}
| engine/shell/platform/android/test/io/flutter/embedding/android/FlutterAndroidComponentTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/android/FlutterAndroidComponentTest.java",
"repo_id": "engine",
"token_count": 4957
} | 346 |
package io.flutter.embedding.engine;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.engine.renderer.FlutterRenderer;
import io.flutter.embedding.engine.renderer.FlutterUiDisplayListener;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class RenderingComponentTest {
@Test
public void flutterUiDisplayListenersCanRemoveThemselvesWhenInvoked() {
// Setup test.
FlutterJNI flutterJNI = new FlutterJNI();
FlutterRenderer flutterRenderer = new FlutterRenderer(flutterJNI);
AtomicInteger listenerInvocationCount = new AtomicInteger(0);
FlutterUiDisplayListener listener =
new FlutterUiDisplayListener() {
@Override
public void onFlutterUiDisplayed() {
// This is the behavior we're testing, but we also verify that this method
// was invoked to ensure that this test behavior executed.
flutterRenderer.removeIsDisplayingFlutterUiListener(this);
// Track the invocation to ensure this method is called once, and only once.
listenerInvocationCount.incrementAndGet();
}
@Override
public void onFlutterUiNoLongerDisplayed() {}
};
flutterRenderer.addIsDisplayingFlutterUiListener(listener);
// Execute behavior under test.
// Pretend we are the native side and tell FlutterJNI that Flutter has rendered a frame.
flutterJNI.onFirstFrame();
// Verify results.
// If we got to this point without an exception, and if our listener was called one time,
// then the behavior under test is correct.
assertEquals(1, listenerInvocationCount.get());
}
@Test
public void flutterUiDisplayListenersAddedAfterFirstFrameAreAutomaticallyInvoked() {
// Setup test.
FlutterJNI flutterJNI = new FlutterJNI();
FlutterRenderer flutterRenderer = new FlutterRenderer(flutterJNI);
FlutterUiDisplayListener listener = mock(FlutterUiDisplayListener.class);
// Pretend we are the native side and tell FlutterJNI that Flutter has rendered a frame.
flutterJNI.onFirstFrame();
// Execute behavior under test.
flutterRenderer.addIsDisplayingFlutterUiListener(listener);
// Verify results.
verify(listener, times(1)).onFlutterUiDisplayed();
}
@Test
public void flutterUiDisplayListenersAddedAfterFlutterUiDisappearsAreNotInvoked() {
// Setup test.
FlutterJNI flutterJNI = new FlutterJNI();
FlutterRenderer flutterRenderer = new FlutterRenderer(flutterJNI);
FlutterUiDisplayListener listener = mock(FlutterUiDisplayListener.class);
// Pretend we are the native side and tell FlutterJNI that Flutter has rendered a frame.
flutterJNI.onFirstFrame();
// Pretend that rendering has stopped.
flutterJNI.onRenderingStopped();
// Execute behavior under test.
flutterRenderer.addIsDisplayingFlutterUiListener(listener);
// Verify results.
verify(listener, never()).onFlutterUiDisplayed();
}
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/RenderingComponentTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/RenderingComponentTest.java",
"repo_id": "engine",
"token_count": 1168
} | 347 |
package io.flutter.embedding.engine.systemchannels;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.refEq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.content.res.AssetManager;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class PlatformChannelTest {
@Test
public void platformChannel_hasStringsMessage() {
MethodChannel rawChannel = mock(MethodChannel.class);
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = new DartExecutor(mockFlutterJNI, mock(AssetManager.class));
PlatformChannel fakePlatformChannel = new PlatformChannel(dartExecutor);
PlatformChannel.PlatformMessageHandler mockMessageHandler =
mock(PlatformChannel.PlatformMessageHandler.class);
fakePlatformChannel.setPlatformMessageHandler(mockMessageHandler);
Boolean returnValue = true;
when(mockMessageHandler.clipboardHasStrings()).thenReturn(returnValue);
MethodCall methodCall = new MethodCall("Clipboard.hasStrings", null);
MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
fakePlatformChannel.parsingMethodCallHandler.onMethodCall(methodCall, mockResult);
JSONObject expected = new JSONObject();
try {
expected.put("value", returnValue);
} catch (JSONException e) {
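      // Not expected in practice: the key is a non-null literal, so put() should not throw.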
}
verify(mockResult).success(refEq(expected));
}
@Test
public void platformChannel_shareInvokeMessage() {
MethodChannel rawChannel = mock(MethodChannel.class);
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = new DartExecutor(mockFlutterJNI, mock(AssetManager.class));
PlatformChannel fakePlatformChannel = new PlatformChannel(dartExecutor);
PlatformChannel.PlatformMessageHandler mockMessageHandler =
mock(PlatformChannel.PlatformMessageHandler.class);
fakePlatformChannel.setPlatformMessageHandler(mockMessageHandler);
ArgumentCaptor<String> valueCapture = ArgumentCaptor.forClass(String.class);
doNothing().when(mockMessageHandler).share(valueCapture.capture());
final String expectedContent = "Flutter";
MethodCall methodCall = new MethodCall("Share.invoke", expectedContent);
MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
fakePlatformChannel.parsingMethodCallHandler.onMethodCall(methodCall, mockResult);
assertEquals(valueCapture.getValue(), expectedContent);
verify(mockResult).success(null);
}
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/PlatformChannelTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/PlatformChannelTest.java",
"repo_id": "engine",
"token_count": 952
} | 348 |
package io.flutter.plugin.mouse;
import static io.flutter.Build.API_LEVELS;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import android.annotation.TargetApi;
import android.app.Activity;
import android.view.PointerIcon;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.android.FlutterView;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.systemchannels.MouseCursorChannel;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import java.util.HashMap;
import org.json.JSONException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Config;
@Config(
manifest = Config.NONE,
minSdk = API_LEVELS.API_24,
shadows = {})
@RunWith(AndroidJUnit4.class)
@TargetApi(API_LEVELS.API_24)
public class MouseCursorPluginTest {
@SuppressWarnings("deprecation")
// Robolectric.setupActivity.
// TODO(reidbaker): https://github.com/flutter/flutter/issues/133151
@Test
public void mouseCursorPlugin_SetsSystemCursorOnRequest() throws JSONException {
// Migrate to ActivityScenario by following https://github.com/robolectric/robolectric/pull/4736
// Initialize a general MouseCursorPlugin.
FlutterView testView = spy(new FlutterView(Robolectric.setupActivity(Activity.class)));
MouseCursorChannel mouseCursorChannel = new MouseCursorChannel(mock(DartExecutor.class));
MouseCursorPlugin mouseCursorPlugin = new MouseCursorPlugin(testView, mouseCursorChannel);
final StoredResult methodResult = new StoredResult();
mouseCursorChannel.synthesizeMethodCall(
new MethodCall(
"activateSystemCursor",
new HashMap<String, Object>() {
private static final long serialVersionUID = 1L;
{
put("device", 1);
put("kind", "text");
}
}),
methodResult);
verify(testView, times(1)).getSystemPointerIcon(PointerIcon.TYPE_TEXT);
verify(testView, times(1)).setPointerIcon(any(PointerIcon.class));
assertEquals(methodResult.result, Boolean.TRUE);
}
}
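// A MethodChannel.Result that records the value passed to success() so the test can assert on it.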
class StoredResult implements MethodChannel.Result {
Object result;
@Override
public void success(Object result) {
this.result = result;
}
@Override
public void error(String errorCode, String errorMessage, Object errorDetails) {}
@Override
public void notImplemented() {}
}
| engine/shell/platform/android/test/io/flutter/plugin/mouse/MouseCursorPluginTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/plugin/mouse/MouseCursorPluginTest.java",
"repo_id": "engine",
"token_count": 977
} | 349 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class PreconditionsTest {
@Test
public void checkNotNull_notNull() {
// Should always return its input.
assertEquals("non-null", Preconditions.checkNotNull("non-null"));
assertEquals(42, (int) Preconditions.checkNotNull(42));
Object classParam = new Object();
assertEquals(classParam, Preconditions.checkNotNull(classParam));
}
@Test
public void checkNotNull_Null() {
assertThrows(
NullPointerException.class,
() -> {
Preconditions.checkNotNull(null);
});
}
}
| engine/shell/platform/android/test/io/flutter/util/PreconditionsTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/util/PreconditionsTest.java",
"repo_id": "engine",
"token_count": 364
} | 350 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_ALERT_PLATFORM_NODE_DELEGATE_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_ALERT_PLATFORM_NODE_DELEGATE_H_
#include "flutter/fml/macros.h"
#include "flutter/third_party/accessibility/ax/ax_node_data.h"
#include "flutter/third_party/accessibility/ax/platform/ax_platform_node_delegate_base.h"
namespace flutter {
// A delegate for a node that holds the text of an a11y alert that a
// screen-reader should announce. The delegate is used to construct an
// AXPlatformNode, and in order to serve as an alert, only needs to be able to
// hold a text announcement and make that text available to the platform node.
class AlertPlatformNodeDelegate : public ui::AXPlatformNodeDelegateBase {
public:
explicit AlertPlatformNodeDelegate(
ui::AXPlatformNodeDelegate& parent_delegate);
~AlertPlatformNodeDelegate();
// Set the alert text of the node for which this is the delegate.
void SetText(const std::u16string& text);
// |AXPlatformNodeDelegate|
gfx::NativeViewAccessible GetParent() override;
private:
// AXPlatformNodeDelegate overrides.
gfx::AcceleratedWidget GetTargetForNativeAccessibilityEvent() override;
const ui::AXUniqueId& GetUniqueId() const override;
const ui::AXNodeData& GetData() const override;
// Delegate of the parent of this node. Returned by GetParent.
ui::AXPlatformNodeDelegate& parent_delegate_;
// Node Data that contains the alert text. Returned by GetData.
ui::AXNodeData data_;
// A unique ID used to identify this node. Returned by GetUniqueId.
ui::AXUniqueId id_;
FML_DISALLOW_COPY_AND_ASSIGN(AlertPlatformNodeDelegate);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_ALERT_PLATFORM_NODE_DELEGATE_H_
| engine/shell/platform/common/alert_platform_node_delegate.h/0 | {
"file_path": "engine/shell/platform/common/alert_platform_node_delegate.h",
"repo_id": "engine",
"token_count": 606
} | 351 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_
#include <memory>
#include <string>
#include <vector>
#include "binary_messenger.h"
#include "method_codec.h"
#include "method_result.h"
namespace flutter {
namespace internal {
// Manages the one-time sending of response data. This is an internal helper
// class for EngineMethodResult, separated out since the implementation doesn't
// vary based on the template type.
class ReplyManager {
public:
explicit ReplyManager(BinaryReply reply_handler_);
~ReplyManager();
// Prevent copying.
ReplyManager(ReplyManager const&) = delete;
ReplyManager& operator=(ReplyManager const&) = delete;
// Sends the given response data (which must either be nullptr, which
// indicates an unhandled method, or a response serialized with |codec_|) to
// the engine.
void SendResponseData(const std::vector<uint8_t>* data);
private:
BinaryReply reply_handler_;
};
} // namespace internal
// Implementation of MethodResult that sends a response to the Flutter engine
// exactly once, encoded using a given codec.
template <typename T>
class EngineMethodResult : public MethodResult<T> {
public:
// Creates a result object that will send results to |reply_handler|, encoded
// using |codec|. The |codec| pointer must remain valid for as long as this
// object exists.
EngineMethodResult(BinaryReply reply_handler, const MethodCodec<T>* codec)
: reply_manager_(
std::make_unique<internal::ReplyManager>(std::move(reply_handler))),
codec_(codec) {}
~EngineMethodResult() = default;
protected:
// |flutter::MethodResult|
void SuccessInternal(const T* result) override {
std::unique_ptr<std::vector<uint8_t>> data =
codec_->EncodeSuccessEnvelope(result);
reply_manager_->SendResponseData(data.get());
}
// |flutter::MethodResult|
void ErrorInternal(const std::string& error_code,
const std::string& error_message,
const T* error_details) override {
std::unique_ptr<std::vector<uint8_t>> data =
codec_->EncodeErrorEnvelope(error_code, error_message, error_details);
reply_manager_->SendResponseData(data.get());
}
// |flutter::MethodResult|
void NotImplementedInternal() override {
reply_manager_->SendResponseData(nullptr);
}
private:
std::unique_ptr<internal::ReplyManager> reply_manager_;
const MethodCodec<T>* codec_;
};
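// Illustrative sketch (not part of the public surface): given a BinaryReply
// captured from an incoming platform message and a codec instance, a result
// object forwards exactly one envelope back to the engine. |reply| and the
// choice of the standard codec below are assumptions for the example.
//
//   EngineMethodResult<EncodableValue> result(
//       std::move(reply), &StandardMethodCodec::GetInstance());
//   result.Success(EncodableValue(42));             // Sends one envelope.
//   // result.Error("bad_args", "Missing argument");  // ...or an error.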
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_
| engine/shell/platform/common/client_wrapper/include/flutter/engine_method_result.h/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/include/flutter/engine_method_result.h",
"repo_id": "engine",
"token_count": 949
} | 352 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_
#include <flutter_texture_registrar.h>
#include <cstdint>
#include <functional>
#include <memory>
#include <utility>
#include <variant>
namespace flutter {
// A pixel buffer texture.
class PixelBufferTexture {
public:
// A callback used for retrieving pixel buffers.
typedef std::function<const FlutterDesktopPixelBuffer*(size_t width,
size_t height)>
CopyBufferCallback;
// Creates a pixel buffer texture that uses the provided |copy_buffer_cb| to
// retrieve the buffer.
// As the callback is usually invoked from the render thread, the callee must
// take care of proper synchronization. It also needs to be ensured that the
// returned buffer isn't released prior to unregistering this texture.
explicit PixelBufferTexture(CopyBufferCallback copy_buffer_callback)
: copy_buffer_callback_(std::move(copy_buffer_callback)) {}
// Returns the callback-provided FlutterDesktopPixelBuffer that contains the
// actual pixel data. The intended surface size is specified by |width| and
// |height|.
const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width,
size_t height) const {
return copy_buffer_callback_(width, height);
}
private:
const CopyBufferCallback copy_buffer_callback_;
};
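// Illustrative sketch (assumed plugin-side code): the callback returns a
// pointer to a FlutterDesktopPixelBuffer that the caller keeps alive until
// the texture is unregistered. |buffer_provider| is a hypothetical object
// that guards the pixel data with its own synchronization.
//
//   PixelBufferTexture texture(
//       [&buffer_provider](size_t width, size_t height)
//           -> const FlutterDesktopPixelBuffer* {
//         return buffer_provider.LockAndGetBuffer(width, height);
//       });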
// A GPU surface-based texture.
class GpuSurfaceTexture {
public:
// A callback used for retrieving surface descriptors.
typedef std::function<
const FlutterDesktopGpuSurfaceDescriptor*(size_t width, size_t height)>
ObtainDescriptorCallback;
GpuSurfaceTexture(FlutterDesktopGpuSurfaceType surface_type,
ObtainDescriptorCallback obtain_descriptor_callback)
: surface_type_(surface_type),
obtain_descriptor_callback_(std::move(obtain_descriptor_callback)) {}
// Returns the callback-provided FlutterDesktopGpuSurfaceDescriptor that
// contains the surface handle. The intended surface size is specified by
// |width| and |height|.
const FlutterDesktopGpuSurfaceDescriptor* ObtainDescriptor(
size_t width,
size_t height) const {
return obtain_descriptor_callback_(width, height);
}
// Gets the surface type.
FlutterDesktopGpuSurfaceType surface_type() const { return surface_type_; }
private:
const FlutterDesktopGpuSurfaceType surface_type_;
const ObtainDescriptorCallback obtain_descriptor_callback_;
};
// The available texture variants.
// Only PixelBufferTexture is currently implemented.
// Other variants are expected to be added in the future.
typedef std::variant<PixelBufferTexture, GpuSurfaceTexture> TextureVariant;
// An object keeping track of external textures.
//
// Thread safety:
// It's safe to call the member methods from any thread.
class TextureRegistrar {
public:
virtual ~TextureRegistrar() = default;
// Registers a |texture| object and returns the ID for that texture.
virtual int64_t RegisterTexture(TextureVariant* texture) = 0;
  // Notifies the Flutter engine that the texture object corresponding
  // to |texture_id| needs to render a new frame.
//
// For PixelBufferTextures, this will effectively make the engine invoke
// the callback that was provided upon creating the texture.
virtual bool MarkTextureFrameAvailable(int64_t texture_id) = 0;
// Asynchronously unregisters an existing texture object.
// Upon completion, the optional |callback| gets invoked.
virtual void UnregisterTexture(int64_t texture_id,
std::function<void()> callback) = 0;
// Unregisters an existing texture object.
// DEPRECATED: Use UnregisterTexture(texture_id, optional_callback) instead.
virtual bool UnregisterTexture(int64_t texture_id) = 0;
};
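// Illustrative registration flow (a sketch; |registrar| and |copy_callback|
// are assumed stand-ins for the plugin's texture registrar and pixel-buffer
// callback):
//
//   TextureVariant texture = PixelBufferTexture(std::move(copy_callback));
//   int64_t texture_id = registrar->RegisterTexture(&texture);
//   // |texture| must outlive its registration.
//   // ... whenever new pixels are ready:
//   registrar->MarkTextureFrameAvailable(texture_id);
//   // ... on teardown:
//   registrar->UnregisterTexture(texture_id, [] { /* release resources */ });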
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_
| engine/shell/platform/common/client_wrapper/include/flutter/texture_registrar.h/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/include/flutter/texture_registrar.h",
"repo_id": "engine",
"token_count": 1366
} | 353 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/common/engine_switches.h"
#include <algorithm>
#include <cstdlib>
#include <iostream>
#include <sstream>
namespace flutter {
std::vector<std::string> GetSwitchesFromEnvironment() {
std::vector<std::string> switches;
// Read engine switches from the environment in debug/profile. If release mode
// support is needed in the future, it should likely use a whitelist.
#ifndef FLUTTER_RELEASE
const char* switch_count_key = "FLUTTER_ENGINE_SWITCHES";
const int kMaxSwitchCount = 50;
const char* switch_count_string = std::getenv(switch_count_key);
if (!switch_count_string) {
return switches;
}
int switch_count = std::min(kMaxSwitchCount, atoi(switch_count_string));
for (int i = 1; i <= switch_count; ++i) {
std::ostringstream switch_key;
switch_key << "FLUTTER_ENGINE_SWITCH_" << i;
const char* switch_value = std::getenv(switch_key.str().c_str());
if (switch_value) {
std::ostringstream switch_value_as_flag;
switch_value_as_flag << "--" << switch_value;
switches.push_back(switch_value_as_flag.str());
} else {
std::cerr << switch_count << " keys expected from " << switch_count_key
<< ", but " << switch_key.str() << " is missing." << std::endl;
}
}
#endif // !FLUTTER_RELEASE
return switches;
}
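// Example of the environment layout this function consumes (an illustrative
// sketch; the particular switch names are examples):
//
//   FLUTTER_ENGINE_SWITCHES=2
//   FLUTTER_ENGINE_SWITCH_1=trace-startup
//   FLUTTER_ENGINE_SWITCH_2=enable-dart-profiling
//
// would yield {"--trace-startup", "--enable-dart-profiling"} in debug and
// profile builds, and an empty vector in release builds.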
} // namespace flutter
| engine/shell/platform/common/engine_switches.cc/0 | {
"file_path": "engine/shell/platform/common/engine_switches.cc",
"repo_id": "engine",
"token_count": 538
} | 354 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/common/json_method_codec.h"
#include "flutter/shell/platform/common/client_wrapper/include/flutter/method_result_functions.h"
#include "gtest/gtest.h"
namespace flutter {
namespace {
// Returns true if the given method calls have the same method name, and their
// arguments have equivalent values.
bool MethodCallsAreEqual(const MethodCall<rapidjson::Document>& a,
const MethodCall<rapidjson::Document>& b) {
if (a.method_name() != b.method_name()) {
return false;
}
// Treat nullptr and Null as equivalent.
if ((!a.arguments() || a.arguments()->IsNull()) &&
(!b.arguments() || b.arguments()->IsNull())) {
return true;
}
return *a.arguments() == *b.arguments();
}
} // namespace
TEST(JsonMethodCodec, HandlesMethodCallsWithNullArguments) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
MethodCall<rapidjson::Document> call("hello", nullptr);
auto encoded = codec.EncodeMethodCall(call);
ASSERT_TRUE(encoded);
std::unique_ptr<MethodCall<rapidjson::Document>> decoded =
codec.DecodeMethodCall(*encoded);
ASSERT_TRUE(decoded);
EXPECT_TRUE(MethodCallsAreEqual(call, *decoded));
}
TEST(JsonMethodCodec, HandlesMethodCallsWithArgument) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
auto arguments = std::make_unique<rapidjson::Document>(rapidjson::kArrayType);
auto& allocator = arguments->GetAllocator();
arguments->PushBack(42, allocator);
arguments->PushBack("world", allocator);
MethodCall<rapidjson::Document> call("hello", std::move(arguments));
auto encoded = codec.EncodeMethodCall(call);
ASSERT_TRUE(encoded);
std::unique_ptr<MethodCall<rapidjson::Document>> decoded =
codec.DecodeMethodCall(*encoded);
ASSERT_TRUE(decoded);
EXPECT_TRUE(MethodCallsAreEqual(call, *decoded));
}
TEST(JsonMethodCodec, HandlesSuccessEnvelopesWithNullResult) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
auto encoded = codec.EncodeSuccessEnvelope();
ASSERT_TRUE(encoded);
std::vector<uint8_t> bytes = {'[', 'n', 'u', 'l', 'l', ']'};
EXPECT_EQ(*encoded, bytes);
bool decoded_successfully = false;
MethodResultFunctions<rapidjson::Document> result_handler(
[&decoded_successfully](const rapidjson::Document* result) {
decoded_successfully = true;
EXPECT_EQ(result, nullptr);
},
nullptr, nullptr);
codec.DecodeAndProcessResponseEnvelope(encoded->data(), encoded->size(),
&result_handler);
EXPECT_TRUE(decoded_successfully);
}
TEST(JsonMethodCodec, HandlesSuccessEnvelopesWithResult) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
rapidjson::Document result;
result.SetInt(42);
auto encoded = codec.EncodeSuccessEnvelope(&result);
ASSERT_TRUE(encoded);
std::vector<uint8_t> bytes = {'[', '4', '2', ']'};
EXPECT_EQ(*encoded, bytes);
bool decoded_successfully = false;
MethodResultFunctions<rapidjson::Document> result_handler(
[&decoded_successfully](const rapidjson::Document* result) {
decoded_successfully = true;
EXPECT_EQ(result->GetInt(), 42);
},
nullptr, nullptr);
codec.DecodeAndProcessResponseEnvelope(encoded->data(), encoded->size(),
&result_handler);
EXPECT_TRUE(decoded_successfully);
}
TEST(JsonMethodCodec, HandlesErrorEnvelopesWithNulls) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
auto encoded = codec.EncodeErrorEnvelope("errorCode");
ASSERT_TRUE(encoded);
std::vector<uint8_t> bytes = {
'[', '"', 'e', 'r', 'r', 'o', 'r', 'C', 'o', 'd', 'e',
'"', ',', '"', '"', ',', 'n', 'u', 'l', 'l', ']',
};
EXPECT_EQ(*encoded, bytes);
bool decoded_successfully = false;
MethodResultFunctions<rapidjson::Document> result_handler(
nullptr,
[&decoded_successfully](const std::string& code,
const std::string& message,
const rapidjson::Document* details) {
decoded_successfully = true;
EXPECT_EQ(code, "errorCode");
EXPECT_EQ(message, "");
EXPECT_EQ(details, nullptr);
},
nullptr);
codec.DecodeAndProcessResponseEnvelope(encoded->data(), encoded->size(),
&result_handler);
EXPECT_TRUE(decoded_successfully);
}
TEST(JsonMethodCodec, HandlesErrorEnvelopesWithDetails) {
const JsonMethodCodec& codec = JsonMethodCodec::GetInstance();
// NOLINTNEXTLINE(clang-analyzer-core.NullDereference)
rapidjson::Document details(rapidjson::kArrayType);
auto& allocator = details.GetAllocator();
details.PushBack("a", allocator);
details.PushBack(42, allocator);
auto encoded =
codec.EncodeErrorEnvelope("errorCode", "something failed", &details);
ASSERT_NE(encoded.get(), nullptr);
std::vector<uint8_t> bytes = {
'[', '"', 'e', 'r', 'r', 'o', 'r', 'C', 'o', 'd', 'e', '"', ',', '"',
's', 'o', 'm', 'e', 't', 'h', 'i', 'n', 'g', ' ', 'f', 'a', 'i', 'l',
'e', 'd', '"', ',', '[', '"', 'a', '"', ',', '4', '2', ']', ']',
};
EXPECT_EQ(*encoded, bytes);
bool decoded_successfully = false;
MethodResultFunctions<rapidjson::Document> result_handler(
nullptr,
[&decoded_successfully](const std::string& code,
const std::string& message,
const rapidjson::Document* details) {
decoded_successfully = true;
EXPECT_EQ(code, "errorCode");
EXPECT_EQ(message, "something failed");
EXPECT_TRUE(details->IsArray());
EXPECT_EQ(std::string((*details)[0].GetString()), "a");
EXPECT_EQ((*details)[1].GetInt(), 42);
},
nullptr);
codec.DecodeAndProcessResponseEnvelope(encoded->data(), encoded->size(),
&result_handler);
EXPECT_TRUE(decoded_successfully);
}
} // namespace flutter
| engine/shell/platform/common/json_method_codec_unittests.cc/0 | {
"file_path": "engine/shell/platform/common/json_method_codec_unittests.cc",
"repo_id": "engine",
"token_count": 2474
} | 355 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_TEXT_INPUT_MODEL_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_TEXT_INPUT_MODEL_H_
#include <memory>
#include <string>
#include "flutter/shell/platform/common/text_range.h"
namespace flutter {
// Handles underlying text input state, using a simple ASCII model.
//
// Ignores special states like "insert mode" for now.
class TextInputModel {
public:
TextInputModel();
virtual ~TextInputModel();
// Sets the text, as well as the selection and the composing region.
//
// This method is typically used to update the TextInputModel's editing state
// when the Flutter framework sends its latest text editing state.
bool SetText(const std::string& text,
const TextRange& selection = TextRange(0),
const TextRange& composing_range = TextRange(0));
// Attempts to set the text selection.
//
// Returns false if the selection is not within the bounds of the text.
// While in composing mode, the selection is restricted to the composing
// range; otherwise, it is restricted to the length of the text.
//
// To update both the text and the selection/composing range within the text
// (for instance, when the framework sends its latest text editing state),
// call |SetText| instead.
bool SetSelection(const TextRange& range);
// Attempts to set the composing range.
//
// Returns false if the range or offset are out of range for the text, or if
// the offset is outside the composing range.
//
// To update both the text and the selection/composing range within the text
// (for instance, when the framework sends its latest text editing state),
// call |SetText| instead.
bool SetComposingRange(const TextRange& range, size_t cursor_offset);
// Begins IME composing mode.
//
// Resets the composing base and extent to the selection start. The existing
// selection is preserved in case composing is aborted with no changes. Until
// |EndComposing| is called, any further changes to selection base and extent
// are restricted to the composing range.
void BeginComposing();
// Replaces the composing range with new UTF-16 text, and sets the selection.
//
// The given |text| replaces text within the current composing range, or the
// current selection if the text wasn't composing. The composing range is
// adjusted to the length of |text|, and the |selection| describes the new
// selection range, relative to the start of the new composing range.
void UpdateComposingText(const std::u16string& text,
const TextRange& selection);
// Replaces the composing range with new UTF-16 text and sets the selection to
// the end of the composing text.
void UpdateComposingText(const std::u16string& text);
// Replaces the composing range with new UTF-8 text.
//
// If a selection of non-zero length exists, it is deleted if the composing
// text is non-empty. The composing range is adjusted to the length of
// |text| and the selection base and offset are set to the end of the
// composing range.
void UpdateComposingText(const std::string& text);
// Commits composing range to the string.
//
// Causes the composing base and extent to be collapsed to the end of the
// range.
void CommitComposing();
// Ends IME composing mode.
//
// Collapses the composing base and offset to 0.
void EndComposing();
// Adds a Unicode code point.
//
// Either appends after the cursor (when selection base and extent are the
// same), or deletes the selected text, replacing it with the given
// code point.
void AddCodePoint(char32_t c);
// Adds UTF-16 text.
//
// Either appends after the cursor (when selection base and extent are the
// same), or deletes the selected text, replacing it with the given text.
void AddText(const std::u16string& text);
// Adds UTF-8 text.
//
// Either appends after the cursor (when selection base and extent are the
// same), or deletes the selected text, replacing it with the given text.
void AddText(const std::string& text);
// Deletes either the selection, or one character ahead of the cursor.
//
// Deleting one character ahead of the cursor occurs when the selection base
// and extent are the same. When composing is active, deletions are
// restricted to text between the composing base and extent.
//
// Returns true if any deletion actually occurred.
bool Delete();
// Deletes text near the cursor.
//
  // A selection is made starting at |offset_from_cursor| code points past the
  // cursor (negative values go before the cursor), and |count| code points are
  // removed. The selection may extend beyond the bounds of the available text;
  // in that case, only the part of the selection that covers the available
  // text is deleted. The existing selection is ignored and removed after this
  // operation. When composing is active, deletions are restricted to the
  // composing range.
//
// Returns true if any deletion actually occurred.
bool DeleteSurrounding(int offset_from_cursor, int count);
// Deletes either the selection, or one character behind the cursor.
//
// Deleting one character behind the cursor occurs when the selection base
// and extent are the same. When composing is active, deletions are
// restricted to the text between the composing base and extent.
//
// Returns true if any deletion actually occurred.
bool Backspace();
// Attempts to move the cursor backward.
//
// Returns true if the cursor could be moved. If a selection is active, moves
// to the start of the selection. If composing is active, motion is
// restricted to the composing range.
bool MoveCursorBack();
// Attempts to move the cursor forward.
//
// Returns true if the cursor could be moved. If a selection is active, moves
// to the end of the selection. If composing is active, motion is restricted
// to the composing range.
bool MoveCursorForward();
// Attempts to move the cursor to the beginning.
//
// If composing is active, the cursor is moved to the beginning of the
// composing range; otherwise, it is moved to the beginning of the text. If
// composing is active, motion is restricted to the composing range.
//
// Returns true if the cursor could be moved.
bool MoveCursorToBeginning();
// Attempts to move the cursor to the end.
//
// If composing is active, the cursor is moved to the end of the composing
// range; otherwise, it is moved to the end of the text. If composing is
// active, motion is restricted to the composing range.
//
// Returns true if the cursor could be moved.
bool MoveCursorToEnd();
// Attempts to select text from the cursor position to the beginning.
//
// If composing is active, the selection is applied to the beginning of the
// composing range; otherwise, it is applied to the beginning of the text.
//
// Returns true if the selection could be applied.
bool SelectToBeginning();
// Attempts to select text from the cursor position to the end.
//
// If composing is active, the selection is applied to the end of the
// composing range; otherwise, it is moved to the end of the text.
//
// Returns true if the selection could be applied.
bool SelectToEnd();
// Gets the current text as UTF-8.
std::string GetText() const;
// Gets the cursor position as a byte offset in UTF-8 string returned from
// GetText().
int GetCursorOffset() const;
// Returns a range covering the entire text.
TextRange text_range() const { return TextRange(0, text_.length()); }
// The current selection.
TextRange selection() const { return selection_; }
// The composing range.
//
// If not in composing mode, returns a collapsed range at position 0.
TextRange composing_range() const { return composing_range_; }
// Whether multi-step input composing mode is active.
bool composing() const { return composing_; }
private:
// Deletes the current selection, if any.
//
// Returns true if any text is deleted. The selection base and extent are
// reset to the start of the selected range.
bool DeleteSelected();
// Returns the currently editable text range.
//
// In composing mode, returns the composing range; otherwise, returns a range
// covering the entire text.
TextRange editable_range() const {
return composing_ ? composing_range_ : text_range();
}
std::u16string text_;
TextRange selection_ = TextRange(0);
TextRange composing_range_ = TextRange(0);
bool composing_ = false;
};
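// Illustrative sketch of typical (non-composing) usage; the values noted in
// the comments follow from the operations above them:
//
//   TextInputModel model;
//   model.SetText("hello", TextRange(5));  // Cursor collapsed after "hello".
//   model.AddText(" world");               // Text: "hello world".
//   model.Backspace();                     // Text: "hello worl".
//   model.MoveCursorToBeginning();
//   model.SelectToEnd();                   // Selection covers "hello worl".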
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_TEXT_INPUT_MODEL_H_
| engine/shell/platform/common/text_input_model.h/0 | {
"file_path": "engine/shell/platform/common/text_input_model.h",
"repo_id": "engine",
"token_count": 2356
} | 356 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCHANNELS_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCHANNELS_H_
#import "FlutterBinaryMessenger.h"
#import "FlutterCodecs.h"
NS_ASSUME_NONNULL_BEGIN
/**
* A message reply callback.
*
* Used for submitting a reply back to a Flutter message sender. Also used in
* the dual capacity for handling a message reply received from Flutter.
*
* @param reply The reply.
*/
typedef void (^FlutterReply)(id _Nullable reply);
/**
 * A strategy for handling incoming messages from Flutter and sending
 * asynchronous replies back to Flutter.
*
* @param message The message.
* @param callback A callback for submitting a reply to the sender which can be invoked from any
* thread.
*/
typedef void (^FlutterMessageHandler)(id _Nullable message, FlutterReply callback);
/**
* A channel for communicating with the Flutter side using basic, asynchronous
* message passing.
*/
FLUTTER_DARWIN_EXPORT
@interface FlutterBasicMessageChannel : NSObject
/**
* Creates a `FlutterBasicMessageChannel` with the specified name and binary
* messenger.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* The channel uses `FlutterStandardMessageCodec` to encode and decode messages.
*
* @param name The channel name.
* @param messenger The binary messenger.
*/
+ (instancetype)messageChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger;
/**
* Creates a `FlutterBasicMessageChannel` with the specified name, binary
* messenger, and message codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The message codec.
*/
+ (instancetype)messageChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMessageCodec>*)codec;
/**
* Initializes a `FlutterBasicMessageChannel` with the specified name, binary
* messenger, and message codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The message codec.
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMessageCodec>*)codec;
/**
* Initializes a `FlutterBasicMessageChannel` with the specified name, binary
* messenger, and message codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The message codec.
* @param taskQueue The FlutterTaskQueue that executes the handler (see
-[FlutterBinaryMessenger makeBackgroundTaskQueue]).
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMessageCodec>*)codec
taskQueue:(NSObject<FlutterTaskQueue>* _Nullable)taskQueue;
/**
* Sends the specified message to the Flutter side, ignoring any reply.
*
* @param message The message. Must be supported by the codec of this
* channel.
*/
- (void)sendMessage:(id _Nullable)message;
/**
* Sends the specified message to the Flutter side, expecting an asynchronous
* reply.
*
* @param message The message. Must be supported by the codec of this channel.
* @param callback A callback to be invoked with the message reply from Flutter.
*/
- (void)sendMessage:(id _Nullable)message reply:(FlutterReply _Nullable)callback;
/**
* Registers a message handler with this channel.
*
* Replaces any existing handler. Use a `nil` handler for unregistering the
* existing handler.
*
* @param handler The message handler.
*/
- (void)setMessageHandler:(FlutterMessageHandler _Nullable)handler;
/**
* Adjusts the number of messages that will get buffered when sending messages to
* channels that aren't fully set up yet. For example, the engine isn't running
* yet or the channel's message handler isn't set up on the Dart side yet.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param newSize The number of messages that will get buffered.
*/
+ (void)resizeChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
size:(NSInteger)newSize;
/**
* Adjusts the number of messages that will get buffered when sending messages to
* channels that aren't fully set up yet. For example, the engine isn't running
* yet or the channel's message handler isn't set up on the Dart side yet.
*
* @param newSize The number of messages that will get buffered.
*/
- (void)resizeChannelBuffer:(NSInteger)newSize;
/**
* Defines whether the channel should show warning messages when discarding messages
* due to overflow.
*
* @param warns When false, the channel is expected to overflow and warning messages
* will not be shown.
* @param name The channel name.
* @param messenger The binary messenger.
*/
+ (void)setWarnsOnOverflow:(BOOL)warns
forChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger;
/**
* Defines whether the channel should show warning messages when discarding messages
* due to overflow.
*
* @param warns When false, the channel is expected to overflow and warning messages
* will not be shown.
*/
- (void)setWarnsOnOverflow:(BOOL)warns;
@end
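// Illustrative usage (a sketch; |messenger| stands for any object conforming
// to FlutterBinaryMessenger, such as a FlutterEngine or FlutterViewController,
// and the channel name is an example):
//
//   FlutterBasicMessageChannel* channel =
//       [FlutterBasicMessageChannel messageChannelWithName:@"samples/echo"
//                                          binaryMessenger:messenger];
//   [channel setMessageHandler:^(id message, FlutterReply callback) {
//     callback(message);  // Echo each incoming message back to Flutter.
//   }];
//   [channel sendMessage:@"ping" reply:^(id reply) {
//     NSLog(@"Reply from Flutter: %@", reply);
//   }];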
/**
* A method call result callback.
*
* Used for submitting a method call result back to a Flutter caller. Also used in
* the dual capacity for handling a method call result received from Flutter.
*
* @param result The result.
*/
typedef void (^FlutterResult)(id _Nullable result);
/**
* A strategy for handling method calls.
*
* @param call The incoming method call.
* @param result A callback to asynchronously submit the result of the call.
* Invoke the callback with a `FlutterError` to indicate that the call failed.
* Invoke the callback with `FlutterMethodNotImplemented` to indicate that the
* method was unknown. Any other values, including `nil`, are interpreted as
* successful results. This can be invoked from any thread.
*/
typedef void (^FlutterMethodCallHandler)(FlutterMethodCall* call, FlutterResult result);
/**
* A constant used with `FlutterMethodCallHandler` to respond to the call of an
* unknown method.
*/
FLUTTER_DARWIN_EXPORT
extern NSObject const* FlutterMethodNotImplemented;
/**
* A channel for communicating with the Flutter side using invocation of
* asynchronous methods.
*/
FLUTTER_DARWIN_EXPORT
@interface FlutterMethodChannel : NSObject
/**
* Creates a `FlutterMethodChannel` with the specified name and binary messenger.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* The channel uses `FlutterStandardMethodCodec` to encode and decode method calls
* and result envelopes.
*
* @param name The channel name.
* @param messenger The binary messenger.
*/
+ (instancetype)methodChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger;
/**
* Creates a `FlutterMethodChannel` with the specified name, binary messenger, and
* method codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
*/
+ (instancetype)methodChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec;
/**
* Initializes a `FlutterMethodChannel` with the specified name, binary messenger,
* and method codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec;
/**
* Initializes a `FlutterMethodChannel` with the specified name, binary messenger,
* method codec, and task queue.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
* @param taskQueue The FlutterTaskQueue that executes the handler (see
-[FlutterBinaryMessenger makeBackgroundTaskQueue]).
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec
taskQueue:(NSObject<FlutterTaskQueue>* _Nullable)taskQueue;
// clang-format off
/**
* Invokes the specified Flutter method with the specified arguments, expecting
* no results.
*
* @see [MethodChannel.setMethodCallHandler](https://api.flutter.dev/flutter/services/MethodChannel/setMethodCallHandler.html)
*
* @param method The name of the method to invoke.
* @param arguments The arguments. Must be a value supported by the codec of this
* channel.
*/
// clang-format on
- (void)invokeMethod:(NSString*)method arguments:(id _Nullable)arguments;
/**
* Invokes the specified Flutter method with the specified arguments, expecting
* an asynchronous result.
*
* @param method The name of the method to invoke.
* @param arguments The arguments. Must be a value supported by the codec of this
* channel.
* @param callback A callback that will be invoked with the asynchronous result.
* The result will be a `FlutterError` instance, if the method call resulted
* in an error on the Flutter side. Will be `FlutterMethodNotImplemented`, if
* the method called was not implemented on the Flutter side. Any other value,
* including `nil`, should be interpreted as successful results.
*/
- (void)invokeMethod:(NSString*)method
arguments:(id _Nullable)arguments
result:(FlutterResult _Nullable)callback;
/**
* Registers a handler for method calls from the Flutter side.
*
* Replaces any existing handler. Use a `nil` handler for unregistering the
* existing handler.
*
* @param handler The method call handler.
*/
- (void)setMethodCallHandler:(FlutterMethodCallHandler _Nullable)handler;
/**
* Adjusts the number of messages that will get buffered when sending messages to
* channels that aren't fully set up yet. For example, the engine isn't running
 * yet or the channel's message handler isn't set up on the Dart side yet.
 *
 * @param newSize The number of messages that will get buffered.
 */
- (void)resizeChannelBuffer:(NSInteger)newSize;
@end
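// Illustrative usage (a sketch; |messenger| is assumed to conform to
// FlutterBinaryMessenger, and the channel and method names are examples):
//
//   FlutterMethodChannel* channel =
//       [FlutterMethodChannel methodChannelWithName:@"samples/battery"
//                                   binaryMessenger:messenger];
//   [channel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) {
//     if ([call.method isEqualToString:@"getBatteryLevel"]) {
//       result(@55);  // Deliver a successful result.
//     } else {
//       result(FlutterMethodNotImplemented);
//     }
//   }];
//   [channel invokeMethod:@"getBatteryLevel" arguments:nil result:^(id value) {
//     NSLog(@"Battery level: %@", value);
//   }];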
/**
* An event sink callback.
*
* @param event The event.
*/
typedef void (^FlutterEventSink)(id _Nullable event);
/**
* A strategy for exposing an event stream to the Flutter side.
*/
FLUTTER_DARWIN_EXPORT
@protocol FlutterStreamHandler
/**
* Sets up an event stream and begin emitting events.
*
* Invoked when the first listener is registered with the Stream associated to
* this channel on the Flutter side.
*
* @param arguments Arguments for the stream.
* @param events A callback to asynchronously emit events. Invoke the
* callback with a `FlutterError` to emit an error event. Invoke the
* callback with `FlutterEndOfEventStream` to indicate that no more
* events will be emitted. Any other value, including `nil` are emitted as
* successful events.
* @return A FlutterError instance, if setup fails.
*/
- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
eventSink:(FlutterEventSink)events;
/**
* Tears down an event stream.
*
* Invoked when the last listener is deregistered from the Stream associated to
* this channel on the Flutter side.
*
* The channel implementation may call this method with `nil` arguments
* to separate a pair of two consecutive set up requests. Such request pairs
* may occur during Flutter hot restart.
*
* @param arguments Arguments for the stream.
* @return A FlutterError instance, if teardown fails.
*/
- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments;
@end
/**
* A constant used with `FlutterEventChannel` to indicate end of stream.
*/
FLUTTER_DARWIN_EXPORT
extern NSObject const* FlutterEndOfEventStream;
/**
* A channel for communicating with the Flutter side using event streams.
*/
FLUTTER_DARWIN_EXPORT
@interface FlutterEventChannel : NSObject
/**
* Creates a `FlutterEventChannel` with the specified name and binary messenger.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterViewController`.
*
* The channel uses `FlutterStandardMethodCodec` to decode stream setup and
* teardown requests, and to encode event envelopes.
*
* @param name The channel name.
* @param messenger The binary messenger.
*/
+ (instancetype)eventChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger;
/**
* Creates a `FlutterEventChannel` with the specified name, binary messenger,
* and method codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
*/
+ (instancetype)eventChannelWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec;
/**
* Initializes a `FlutterEventChannel` with the specified name, binary messenger,
* and method codec.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec;
/**
* Initializes a `FlutterEventChannel` with the specified name, binary messenger,
* method codec and task queue.
*
* The channel name logically identifies the channel; identically named channels
* interfere with each other's communication.
*
* The binary messenger is a facility for sending raw, binary messages to the
* Flutter side. This protocol is implemented by `FlutterEngine` and `FlutterViewController`.
*
* @param name The channel name.
* @param messenger The binary messenger.
* @param codec The method codec.
* @param taskQueue The FlutterTaskQueue that executes the handler (see
-[FlutterBinaryMessenger makeBackgroundTaskQueue]).
*/
- (instancetype)initWithName:(NSString*)name
binaryMessenger:(NSObject<FlutterBinaryMessenger>*)messenger
codec:(NSObject<FlutterMethodCodec>*)codec
taskQueue:(NSObject<FlutterTaskQueue>* _Nullable)taskQueue;
/**
* Registers a handler for stream setup requests from the Flutter side.
*
* Replaces any existing handler. Use a `nil` handler for unregistering the
* existing handler.
*
* @param handler The stream handler.
*/
- (void)setStreamHandler:(NSObject<FlutterStreamHandler>* _Nullable)handler;
@end
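// Illustrative usage (a sketch; |messenger| is assumed to conform to
// FlutterBinaryMessenger and |handler| to FlutterStreamHandler):
//
//   FlutterEventChannel* channel =
//       [FlutterEventChannel eventChannelWithName:@"samples/charging"
//                                 binaryMessenger:messenger];
//   [channel setStreamHandler:handler];
//
// The handler's -onListenWithArguments:eventSink: keeps the provided
// FlutterEventSink and invokes it for each event, finishing the stream with
// FlutterEndOfEventStream or reporting failures with a FlutterError.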
NS_ASSUME_NONNULL_END
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCHANNELS_H_
| engine/shell/platform/darwin/common/framework/Headers/FlutterChannels.h/0 | {
"file_path": "engine/shell/platform/darwin/common/framework/Headers/FlutterChannels.h",
"repo_id": "engine",
"token_count": 5414
} | 357 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERSTANDARDCODEC_INTERNAL_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERSTANDARDCODEC_INTERNAL_H_
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h"
#import "flutter/shell/platform/darwin/common/framework/Source/FlutterStandardCodecHelper.h"
namespace flutter {
FlutterStandardField FlutterStandardFieldForDataType(
FlutterStandardDataType type) {
switch (type) {
case FlutterStandardDataTypeUInt8:
return FlutterStandardFieldUInt8Data;
case FlutterStandardDataTypeInt32:
return FlutterStandardFieldInt32Data;
case FlutterStandardDataTypeInt64:
return FlutterStandardFieldInt64Data;
case FlutterStandardDataTypeFloat32:
return FlutterStandardFieldFloat32Data;
case FlutterStandardDataTypeFloat64:
return FlutterStandardFieldFloat64Data;
}
}
FlutterStandardDataType FlutterStandardDataTypeForField(
FlutterStandardField field) {
switch (field) {
case FlutterStandardFieldUInt8Data:
return FlutterStandardDataTypeUInt8;
case FlutterStandardFieldInt32Data:
return FlutterStandardDataTypeInt32;
case FlutterStandardFieldInt64Data:
return FlutterStandardDataTypeInt64;
case FlutterStandardFieldFloat32Data:
return FlutterStandardDataTypeFloat32;
case FlutterStandardFieldFloat64Data:
return FlutterStandardDataTypeFloat64;
default:
return FlutterStandardDataTypeUInt8;
}
}
UInt8 elementSizeForFlutterStandardDataType(FlutterStandardDataType type) {
switch (type) {
case FlutterStandardDataTypeUInt8:
return 1;
case FlutterStandardDataTypeInt32:
return 4;
case FlutterStandardDataTypeInt64:
return 8;
case FlutterStandardDataTypeFloat32:
return 4;
case FlutterStandardDataTypeFloat64:
return 8;
}
}
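// For example, the Int64 variant round-trips through these helpers as:
//   FlutterStandardFieldForDataType(FlutterStandardDataTypeInt64)
//       == FlutterStandardFieldInt64Data
//   FlutterStandardDataTypeForField(FlutterStandardFieldInt64Data)
//       == FlutterStandardDataTypeInt64
//   elementSizeForFlutterStandardDataType(FlutterStandardDataTypeInt64) == 8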
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERSTANDARDCODEC_INTERNAL_H_
| engine/shell/platform/darwin/common/framework/Source/FlutterStandardCodec_Internal.h/0 | {
"file_path": "engine/shell/platform/darwin/common/framework/Source/FlutterStandardCodec_Internal.h",
"repo_id": "engine",
"token_count": 777
} | 358 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTER_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTER_H_
#import "FlutterAppDelegate.h"
#import "FlutterBinaryMessenger.h"
#import "FlutterCallbackCache.h"
#import "FlutterChannels.h"
#import "FlutterCodecs.h"
#import "FlutterDartProject.h"
#import "FlutterEngine.h"
#import "FlutterEngineGroup.h"
#import "FlutterHeadlessDartRunner.h"
#import "FlutterMacros.h"
#import "FlutterPlatformViews.h"
#import "FlutterPlugin.h"
#import "FlutterPluginAppLifeCycleDelegate.h"
#import "FlutterTexture.h"
#import "FlutterViewController.h"
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTER_H_
| engine/shell/platform/darwin/ios/framework/Headers/Flutter.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Headers/Flutter.h",
"repo_id": "engine",
"token_count": 329
} | 359 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCALLBACKCACHE_INTERNAL_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCALLBACKCACHE_INTERNAL_H_
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterCallbackCache.h"
@interface FlutterCallbackCache ()
+ (void)setCachePath:(NSString*)path;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCALLBACKCACHE_INTERNAL_H_
| engine/shell/platform/darwin/ios/framework/Source/FlutterCallbackCache_Internal.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterCallbackCache_Internal.h",
"repo_id": "engine",
"token_count": 236
} | 360 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#import <XCTest/XCTest.h>
#import <objc/runtime.h>
#import "flutter/common/settings.h"
#include "flutter/fml/synchronization/sync_switch.h"
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterMacros.h"
#import "flutter/shell/platform/darwin/common/framework/Source/FlutterBinaryMessengerRelay.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterDartProject_Internal.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterEngine_Internal.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterEngine_Test.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputPlugin.h"
#import "flutter/shell/platform/darwin/ios/platform_view_ios.h"
FLUTTER_ASSERT_ARC
@interface FlutterEngineSpy : FlutterEngine
@property(nonatomic) BOOL ensureSemanticsEnabledCalled;
@end
@implementation FlutterEngineSpy
- (void)ensureSemanticsEnabled {
_ensureSemanticsEnabledCalled = YES;
}
@end
@interface FlutterEngine () <FlutterTextInputDelegate>
@end
/// FlutterBinaryMessengerRelay used for testing that setting FlutterEngine.binaryMessenger to
/// the current instance doesn't trigger a use-after-free bug.
///
/// See: testSetBinaryMessengerToSameBinaryMessenger
@interface FakeBinaryMessengerRelay : FlutterBinaryMessengerRelay
@property(nonatomic, assign) BOOL failOnDealloc;
@end
@implementation FakeBinaryMessengerRelay
- (void)dealloc {
if (_failOnDealloc) {
XCTFail("FakeBinaryMessageRelay should not be deallocated");
}
}
@end
@interface FlutterEngineTest : XCTestCase
@end
@implementation FlutterEngineTest
- (void)setUp {
}
- (void)tearDown {
}
- (void)testCreate {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertNotNil(engine);
}
- (void)testInfoPlist {
// Check the embedded Flutter.framework Info.plist, not the linked dylib.
NSURL* flutterFrameworkURL =
[NSBundle.mainBundle.privateFrameworksURL URLByAppendingPathComponent:@"Flutter.framework"];
NSBundle* flutterBundle = [NSBundle bundleWithURL:flutterFrameworkURL];
XCTAssertEqualObjects(flutterBundle.bundleIdentifier, @"io.flutter.flutter");
NSDictionary<NSString*, id>* infoDictionary = flutterBundle.infoDictionary;
  // OS version can have one, two, or three components: "8", "8.0", "8.0.0"
NSError* regexError = NULL;
NSRegularExpression* osVersionRegex =
[NSRegularExpression regularExpressionWithPattern:@"((0|[1-9]\\d*)\\.)*(0|[1-9]\\d*)"
options:NSRegularExpressionCaseInsensitive
error:®exError];
XCTAssertNil(regexError);
// Smoke test the test regex.
NSString* testString = @"9";
NSUInteger versionMatches =
[osVersionRegex numberOfMatchesInString:testString
options:NSMatchingAnchored
range:NSMakeRange(0, testString.length)];
XCTAssertEqual(versionMatches, 1UL);
testString = @"9.1";
versionMatches = [osVersionRegex numberOfMatchesInString:testString
options:NSMatchingAnchored
range:NSMakeRange(0, testString.length)];
XCTAssertEqual(versionMatches, 1UL);
testString = @"9.0.1";
versionMatches = [osVersionRegex numberOfMatchesInString:testString
options:NSMatchingAnchored
range:NSMakeRange(0, testString.length)];
XCTAssertEqual(versionMatches, 1UL);
testString = @".0.1";
versionMatches = [osVersionRegex numberOfMatchesInString:testString
options:NSMatchingAnchored
range:NSMakeRange(0, testString.length)];
XCTAssertEqual(versionMatches, 0UL);
// Test Info.plist values.
NSString* minimumOSVersion = infoDictionary[@"MinimumOSVersion"];
versionMatches = [osVersionRegex numberOfMatchesInString:minimumOSVersion
options:NSMatchingAnchored
range:NSMakeRange(0, minimumOSVersion.length)];
XCTAssertEqual(versionMatches, 1UL);
// SHA length is 40.
XCTAssertEqual(((NSString*)infoDictionary[@"FlutterEngine"]).length, 40UL);
// {clang_version} placeholder is 15 characters. The clang string version
// is longer than that, so check if the placeholder has been replaced, without
// actually checking a literal string, which could be different on various machines.
XCTAssertTrue(((NSString*)infoDictionary[@"ClangVersion"]).length > 15UL);
}
- (void)testDeallocated {
__weak FlutterEngine* weakEngine = nil;
@autoreleasepool {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
weakEngine = engine;
[engine run];
XCTAssertNotNil(weakEngine);
}
XCTAssertNil(weakEngine);
}
- (void)testSendMessageBeforeRun {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertNotNil(engine);
XCTAssertThrows([engine.binaryMessenger
sendOnChannel:@"foo"
message:[@"bar" dataUsingEncoding:NSUTF8StringEncoding]
binaryReply:nil]);
}
- (void)testSetMessageHandlerBeforeRun {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertNotNil(engine);
XCTAssertThrows([engine.binaryMessenger
setMessageHandlerOnChannel:@"foo"
binaryMessageHandler:^(NSData* _Nullable message, FlutterBinaryReply _Nonnull reply){
}]);
}
- (void)testNilSetMessageHandlerBeforeRun {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertNotNil(engine);
XCTAssertNoThrow([engine.binaryMessenger setMessageHandlerOnChannel:@"foo"
binaryMessageHandler:nil]);
}
- (void)testNotifyPluginOfDealloc {
id plugin = OCMProtocolMock(@protocol(FlutterPlugin));
OCMStub([plugin detachFromEngineForRegistrar:[OCMArg any]]);
{
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"engine" project:project];
NSObject<FlutterPluginRegistrar>* registrar = [engine registrarForPlugin:@"plugin"];
[registrar publish:plugin];
engine = nil;
}
OCMVerify([plugin detachFromEngineForRegistrar:[OCMArg any]]);
}
- (void)testSetBinaryMessengerToSameBinaryMessenger {
FakeBinaryMessengerRelay* fakeBinaryMessenger = [[FakeBinaryMessengerRelay alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] init];
[engine setBinaryMessenger:fakeBinaryMessenger];
// Verify that the setter doesn't free the old messenger before setting the new messenger.
fakeBinaryMessenger.failOnDealloc = YES;
[engine setBinaryMessenger:fakeBinaryMessenger];
// Don't fail when ARC releases the binary messenger.
fakeBinaryMessenger.failOnDealloc = NO;
}
- (void)testRunningInitialRouteSendsNavigationMessage {
id mockBinaryMessenger = OCMClassMock([FlutterBinaryMessengerRelay class]);
FlutterEngine* engine = [[FlutterEngine alloc] init];
[engine setBinaryMessenger:mockBinaryMessenger];
// Run with an initial route.
[engine runWithEntrypoint:FlutterDefaultDartEntrypoint initialRoute:@"test"];
// Now check that an encoded method call has been made on the binary messenger to set the
// initial route to "test".
FlutterMethodCall* setInitialRouteMethodCall =
[FlutterMethodCall methodCallWithMethodName:@"setInitialRoute" arguments:@"test"];
NSData* encodedSetInitialRouteMethod =
[[FlutterJSONMethodCodec sharedInstance] encodeMethodCall:setInitialRouteMethodCall];
OCMVerify([mockBinaryMessenger sendOnChannel:@"flutter/navigation"
message:encodedSetInitialRouteMethod]);
}
- (void)testInitialRouteSettingsSendsNavigationMessage {
id mockBinaryMessenger = OCMClassMock([FlutterBinaryMessengerRelay class]);
auto settings = FLTDefaultSettingsForBundle();
settings.route = "test";
FlutterDartProject* project = [[FlutterDartProject alloc] initWithSettings:settings];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
[engine setBinaryMessenger:mockBinaryMessenger];
[engine run];
// Now check that an encoded method call has been made on the binary messenger to set the
// initial route to "test".
FlutterMethodCall* setInitialRouteMethodCall =
[FlutterMethodCall methodCallWithMethodName:@"setInitialRoute" arguments:@"test"];
NSData* encodedSetInitialRouteMethod =
[[FlutterJSONMethodCodec sharedInstance] encodeMethodCall:setInitialRouteMethodCall];
OCMVerify([mockBinaryMessenger sendOnChannel:@"flutter/navigation"
message:encodedSetInitialRouteMethod]);
}
- (void)testPlatformViewsControllerRenderingMetalBackend {
FlutterEngine* engine = [[FlutterEngine alloc] init];
[engine run];
flutter::IOSRenderingAPI renderingApi = [engine platformViewsRenderingAPI];
XCTAssertEqual(renderingApi, flutter::IOSRenderingAPI::kMetal);
}
- (void)testPlatformViewsControllerRenderingSoftware {
auto settings = FLTDefaultSettingsForBundle();
settings.enable_software_rendering = true;
FlutterDartProject* project = [[FlutterDartProject alloc] initWithSettings:settings];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
[engine run];
flutter::IOSRenderingAPI renderingApi = [engine platformViewsRenderingAPI];
XCTAssertEqual(renderingApi, flutter::IOSRenderingAPI::kSoftware);
}
- (void)testWaitForFirstFrameTimeout {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
[engine run];
XCTestExpectation* timeoutFirstFrame = [self expectationWithDescription:@"timeoutFirstFrame"];
[engine waitForFirstFrame:0.1
callback:^(BOOL didTimeout) {
if (timeoutFirstFrame) {
[timeoutFirstFrame fulfill];
}
}];
[self waitForExpectationsWithTimeout:5 handler:nil];
}
- (void)testSpawn {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
[engine run];
FlutterEngine* spawn = [engine spawnWithEntrypoint:nil
libraryURI:nil
initialRoute:nil
entrypointArgs:nil];
XCTAssertNotNil(spawn);
}
- (void)testDeallocNotification {
XCTestExpectation* deallocNotification = [self expectationWithDescription:@"deallocNotification"];
NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
id<NSObject> observer;
@autoreleasepool {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
observer = [center addObserverForName:kFlutterEngineWillDealloc
object:engine
queue:[NSOperationQueue mainQueue]
usingBlock:^(NSNotification* note) {
[deallocNotification fulfill];
}];
}
[self waitForExpectationsWithTimeout:1 handler:nil];
[center removeObserver:observer];
}
- (void)testSetHandlerAfterRun {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
XCTestExpectation* gotMessage = [self expectationWithDescription:@"gotMessage"];
dispatch_async(dispatch_get_main_queue(), ^{
NSObject<FlutterPluginRegistrar>* registrar = [engine registrarForPlugin:@"foo"];
fml::AutoResetWaitableEvent latch;
[engine run];
flutter::Shell& shell = engine.shell;
engine.shell.GetTaskRunners().GetUITaskRunner()->PostTask([&latch, &shell] {
flutter::Engine::Delegate& delegate = shell;
auto message = std::make_unique<flutter::PlatformMessage>("foo", nullptr);
delegate.OnEngineHandlePlatformMessage(std::move(message));
latch.Signal();
});
latch.Wait();
[registrar.messenger setMessageHandlerOnChannel:@"foo"
binaryMessageHandler:^(NSData* message, FlutterBinaryReply reply) {
[gotMessage fulfill];
}];
});
[self waitForExpectationsWithTimeout:1 handler:nil];
}
- (void)testThreadPrioritySetCorrectly {
XCTestExpectation* prioritiesSet = [self expectationWithDescription:@"prioritiesSet"];
prioritiesSet.expectedFulfillmentCount = 3;
IMP mockSetThreadPriority =
imp_implementationWithBlock(^(NSThread* thread, double threadPriority) {
if ([thread.name hasSuffix:@".ui"]) {
XCTAssertEqual(threadPriority, 1.0);
[prioritiesSet fulfill];
} else if ([thread.name hasSuffix:@".raster"]) {
XCTAssertEqual(threadPriority, 1.0);
[prioritiesSet fulfill];
} else if ([thread.name hasSuffix:@".io"]) {
XCTAssertEqual(threadPriority, 0.5);
[prioritiesSet fulfill];
}
});
Method method = class_getInstanceMethod([NSThread class], @selector(setThreadPriority:));
IMP originalSetThreadPriority = method_getImplementation(method);
method_setImplementation(method, mockSetThreadPriority);
FlutterEngine* engine = [[FlutterEngine alloc] init];
[engine run];
[self waitForExpectationsWithTimeout:1 handler:nil];
method_setImplementation(method, originalSetThreadPriority);
}
- (void)testCanEnableDisableEmbedderAPIThroughInfoPlist {
{
// Not enable embedder API by default
auto settings = FLTDefaultSettingsForBundle();
settings.enable_software_rendering = true;
FlutterDartProject* project = [[FlutterDartProject alloc] initWithSettings:settings];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertFalse(engine.enableEmbedderAPI);
}
{
// Enable embedder api
id mockMainBundle = OCMPartialMock([NSBundle mainBundle]);
OCMStub([mockMainBundle objectForInfoDictionaryKey:@"FLTEnableIOSEmbedderAPI"])
.andReturn(@"YES");
auto settings = FLTDefaultSettingsForBundle();
settings.enable_software_rendering = true;
FlutterDartProject* project = [[FlutterDartProject alloc] initWithSettings:settings];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
XCTAssertTrue(engine.enableEmbedderAPI);
}
}
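// For reference (illustration only): the Info.plist entry that the OCMStub in the test above
// simulates would look like this in an application bundle, mirroring the string the stub returns:
//
//   <key>FLTEnableIOSEmbedderAPI</key>
//   <string>YES</string>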
- (void)testFlutterTextInputViewDidResignFirstResponderWillCallTextInputClientConnectionClosed {
id mockBinaryMessenger = OCMClassMock([FlutterBinaryMessengerRelay class]);
FlutterEngine* engine = [[FlutterEngine alloc] init];
[engine setBinaryMessenger:mockBinaryMessenger];
[engine runWithEntrypoint:FlutterDefaultDartEntrypoint initialRoute:@"test"];
[engine flutterTextInputView:nil didResignFirstResponderWithTextInputClient:1];
FlutterMethodCall* methodCall =
[FlutterMethodCall methodCallWithMethodName:@"TextInputClient.onConnectionClosed"
arguments:@[ @(1) ]];
NSData* encodedMethodCall = [[FlutterJSONMethodCodec sharedInstance] encodeMethodCall:methodCall];
OCMVerify([mockBinaryMessenger sendOnChannel:@"flutter/textinput" message:encodedMethodCall]);
}
- (void)testFlutterEngineUpdatesDisplays {
FlutterEngine* engine = [[FlutterEngine alloc] init];
id mockEngine = OCMPartialMock(engine);
[engine run];
OCMVerify(times(1), [mockEngine updateDisplays]);
engine.viewController = nil;
OCMVerify(times(2), [mockEngine updateDisplays]);
}
- (void)testLifeCycleNotificationDidEnterBackground {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
[engine run];
NSNotification* sceneNotification =
[NSNotification notificationWithName:UISceneDidEnterBackgroundNotification
object:nil
userInfo:nil];
NSNotification* applicationNotification =
[NSNotification notificationWithName:UIApplicationDidEnterBackgroundNotification
object:nil
userInfo:nil];
id mockEngine = OCMPartialMock(engine);
[[NSNotificationCenter defaultCenter] postNotification:sceneNotification];
[[NSNotificationCenter defaultCenter] postNotification:applicationNotification];
#if APPLICATION_EXTENSION_API_ONLY
OCMVerify(times(1), [mockEngine sceneDidEnterBackground:[OCMArg any]]);
#else
OCMVerify(times(1), [mockEngine applicationDidEnterBackground:[OCMArg any]]);
#endif
XCTAssertTrue(engine.isGpuDisabled);
bool switch_value = false;
[engine shell].GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers().SetIfTrue([&] { switch_value = true; }).SetIfFalse([&] {
switch_value = false;
}));
XCTAssertTrue(switch_value);
}
- (void)testLifeCycleNotificationWillEnterForeground {
FlutterDartProject* project = [[FlutterDartProject alloc] init];
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar" project:project];
[engine run];
NSNotification* sceneNotification =
[NSNotification notificationWithName:UISceneWillEnterForegroundNotification
object:nil
userInfo:nil];
NSNotification* applicationNotification =
[NSNotification notificationWithName:UIApplicationWillEnterForegroundNotification
object:nil
userInfo:nil];
id mockEngine = OCMPartialMock(engine);
[[NSNotificationCenter defaultCenter] postNotification:sceneNotification];
[[NSNotificationCenter defaultCenter] postNotification:applicationNotification];
#if APPLICATION_EXTENSION_API_ONLY
OCMVerify(times(1), [mockEngine sceneWillEnterForeground:[OCMArg any]]);
#else
OCMVerify(times(1), [mockEngine applicationWillEnterForeground:[OCMArg any]]);
#endif
XCTAssertFalse(engine.isGpuDisabled);
bool switch_value = true;
[engine shell].GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers().SetIfTrue([&] { switch_value = true; }).SetIfFalse([&] {
switch_value = false;
}));
XCTAssertFalse(switch_value);
}
- (void)testSpawnsShareGpuContext {
FlutterEngine* engine = [[FlutterEngine alloc] initWithName:@"foobar"];
[engine run];
FlutterEngine* spawn = [engine spawnWithEntrypoint:nil
libraryURI:nil
initialRoute:nil
entrypointArgs:nil];
XCTAssertNotNil(spawn);
XCTAssertTrue([engine iosPlatformView] != nullptr);
XCTAssertTrue([spawn iosPlatformView] != nullptr);
std::shared_ptr<flutter::IOSContext> engine_context = [engine iosPlatformView]->GetIosContext();
std::shared_ptr<flutter::IOSContext> spawn_context = [spawn iosPlatformView]->GetIosContext();
XCTAssertEqual(engine_context, spawn_context);
  // If this assert fails it means we may be using the software backend. For software
  // rendering, the main context is expected to be nullptr.
XCTAssertTrue(engine_context->GetMainContext() != nullptr);
XCTAssertEqual(engine_context->GetMainContext(), spawn_context->GetMainContext());
}
- (void)testEnableSemanticsWhenFlutterViewAccessibilityDidCall {
FlutterEngineSpy* engine = [[FlutterEngineSpy alloc] initWithName:@"foobar"];
engine.ensureSemanticsEnabledCalled = NO;
[engine flutterViewAccessibilityDidCall];
XCTAssertTrue(engine.ensureSemanticsEnabledCalled);
}
@end
| engine/shell/platform/darwin/ios/framework/Source/FlutterEngineTest.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterEngineTest.mm",
"repo_id": "engine",
"token_count": 7774
} | 361 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterOverlayView.h"
#include <CoreGraphics/CGColorSpace.h>
#include <Metal/Metal.h>
#include "flutter/common/settings.h"
#include "flutter/common/task_runners.h"
#include "flutter/flow/layers/layer_tree.h"
#include "flutter/fml/platform/darwin/cf_utils.h"
#include "flutter/fml/synchronization/waitable_event.h"
#include "flutter/fml/trace_event.h"
#include "flutter/shell/common/platform_view.h"
#include "flutter/shell/common/rasterizer.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterView.h"
#import "flutter/shell/platform/darwin/ios/ios_surface_software.h"
#include "third_party/skia/include/utils/mac/SkCGUtils.h"
// This is mostly a duplication of FlutterView.
// TODO(amirh): once GL support is in, evaluate whether we can merge this with FlutterView.
@implementation FlutterOverlayView {
fml::CFRef<CGColorSpaceRef> _colorSpaceRef;
}
- (instancetype)initWithFrame:(CGRect)frame {
NSAssert(NO, @"FlutterOverlayView must init or initWithContentsScale");
return nil;
}
- (instancetype)initWithCoder:(NSCoder*)aDecoder {
NSAssert(NO, @"FlutterOverlayView must init or initWithContentsScale");
return nil;
}
- (instancetype)init {
self = [super initWithFrame:CGRectZero];
if (self) {
self.layer.opaque = NO;
self.userInteractionEnabled = NO;
self.autoresizingMask = (UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight);
}
return self;
}
- (instancetype)initWithContentsScale:(CGFloat)contentsScale
pixelFormat:(MTLPixelFormat)pixelFormat {
self = [self init];
if ([self.layer isKindOfClass:NSClassFromString(@"CAMetalLayer")]) {
self.layer.allowsGroupOpacity = NO;
self.layer.contentsScale = contentsScale;
self.layer.rasterizationScale = contentsScale;
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
CAMetalLayer* layer = (CAMetalLayer*)self.layer;
#pragma clang diagnostic pop
layer.pixelFormat = pixelFormat;
if (pixelFormat == MTLPixelFormatRGBA16Float) {
self->_colorSpaceRef = fml::CFRef(CGColorSpaceCreateWithName(kCGColorSpaceExtendedSRGB));
layer.colorspace = self->_colorSpaceRef;
}
}
return self;
}
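// Illustration only: the compositor creates overlay views roughly like this, using the screen's
// scale and the pixel format of the root FlutterView's layer (the values below are example
// assumptions, not engine defaults):
//
//   FlutterOverlayView* overlay =
//       [[FlutterOverlayView alloc] initWithContentsScale:UIScreen.mainScreen.scale
//                                             pixelFormat:MTLPixelFormatBGRA8Unorm];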
+ (Class)layerClass {
return [FlutterView layerClass];
}
// TODO(amirh): implement drawLayer to support snapshotting.
@end
| engine/shell/platform/darwin/ios/framework/Source/FlutterOverlayView.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterOverlayView.mm",
"repo_id": "engine",
"token_count": 917
} | 362 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERSPELLCHECKPLUGIN_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERSPELLCHECKPLUGIN_H_
#include "flutter/fml/memory/weak_ptr.h"
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterChannels.h"
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h"
@interface FlutterSpellCheckPlugin : NSObject
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result;
@end
@interface FlutterSpellCheckResult : NSObject
@property(nonatomic, copy, readonly) NSArray<NSString*>* suggestions;
@property(nonatomic, assign, readonly) NSRange misspelledRange;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)initWithMisspelledRange:(NSRange)range
suggestions:(NSArray<NSString*>*)suggestions NS_DESIGNATED_INITIALIZER;
- (NSDictionary<NSString*, NSObject*>*)toDictionary;
@end
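// Illustration only: constructing a result for a misspelled word at character range {6, 5} and
// serializing it for the spell-check channel. Only APIs declared above are used.
//
//   FlutterSpellCheckResult* result =
//       [[FlutterSpellCheckResult alloc] initWithMisspelledRange:NSMakeRange(6, 5)
//                                                    suggestions:@[ @"world", @"word" ]];
//   NSDictionary<NSString*, NSObject*>* payload = [result toDictionary];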
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERSPELLCHECKPLUGIN_H_
| engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.h",
"repo_id": "engine",
"token_count": 462
} | 363 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPlugin.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterEngine_Internal.h"
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#include "flutter/fml/logging.h"
#pragma mark - UndoManager channel method names.
static NSString* const kSetUndoStateMethod = @"UndoManager.setUndoState";
#pragma mark - Undo State field names
static NSString* const kCanUndo = @"canUndo";
static NSString* const kCanRedo = @"canRedo";
@implementation FlutterUndoManagerPlugin {
id<FlutterUndoManagerDelegate> _undoManagerDelegate;
}
- (instancetype)initWithDelegate:(id<FlutterUndoManagerDelegate>)undoManagerDelegate {
self = [super init];
if (self) {
    // `_undoManagerDelegate` is a weak reference because the delegate retains
    // FlutterUndoManagerPlugin; a strong reference here would create a retain cycle.
_undoManagerDelegate = undoManagerDelegate;
}
return self;
}
- (void)dealloc {
[self resetUndoManager];
[super dealloc];
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
NSString* method = call.method;
id args = call.arguments;
if ([method isEqualToString:kSetUndoStateMethod]) {
[self setUndoState:args];
result(nil);
} else {
result(FlutterMethodNotImplemented);
}
}
- (NSUndoManager*)undoManager {
return _viewController.undoManager;
}
- (void)resetUndoManager API_AVAILABLE(ios(9.0)) {
[[self undoManager] removeAllActionsWithTarget:self];
}
- (void)registerUndoWithDirection:(FlutterUndoRedoDirection)direction API_AVAILABLE(ios(9.0)) {
[[self undoManager] beginUndoGrouping];
[[self undoManager] registerUndoWithTarget:self
handler:^(id target) {
// Register undo with opposite direction.
FlutterUndoRedoDirection newDirection =
(direction == FlutterUndoRedoDirectionRedo)
? FlutterUndoRedoDirectionUndo
: FlutterUndoRedoDirectionRedo;
[target registerUndoWithDirection:newDirection];
// Invoke method on delegate.
[_undoManagerDelegate flutterUndoManagerPlugin:self
handleUndoWithDirection:direction];
}];
[[self undoManager] endUndoGrouping];
}
- (void)registerRedo API_AVAILABLE(ios(9.0)) {
[[self undoManager] beginUndoGrouping];
[[self undoManager]
registerUndoWithTarget:self
handler:^(id target) {
// Register undo with opposite direction.
[target registerUndoWithDirection:FlutterUndoRedoDirectionRedo];
}];
[[self undoManager] endUndoGrouping];
[[self undoManager] undo];
}
- (void)setUndoState:(NSDictionary*)dictionary API_AVAILABLE(ios(9.0)) {
BOOL groupsByEvent = [self undoManager].groupsByEvent;
[self undoManager].groupsByEvent = NO;
BOOL canUndo = [dictionary[kCanUndo] boolValue];
BOOL canRedo = [dictionary[kCanRedo] boolValue];
[self resetUndoManager];
if (canUndo) {
[self registerUndoWithDirection:FlutterUndoRedoDirectionUndo];
}
if (canRedo) {
[self registerRedo];
}
if (_viewController.engine.textInputPlugin.textInputView != nil) {
// This is needed to notify the iPadOS keyboard that it needs to update the
// state of the UIBarButtons. Otherwise, the state changes to NSUndoManager
// will not show up until the next keystroke (or other trigger).
UITextInputAssistantItem* assistantItem =
_viewController.engine.textInputPlugin.textInputView.inputAssistantItem;
assistantItem.leadingBarButtonGroups = assistantItem.leadingBarButtonGroups;
}
[self undoManager].groupsByEvent = groupsByEvent;
}
@end
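// Illustration only: a minimal sketch of the platform message that drives -setUndoState:,
// assuming the framework sends the method and argument keys defined at the top of this file
// over the undo-manager method channel. `plugin` is a hypothetical instance for the example.
//
//   FlutterMethodCall* call =
//       [FlutterMethodCall methodCallWithMethodName:kSetUndoStateMethod
//                                         arguments:@{kCanUndo : @YES, kCanRedo : @NO}];
//   [plugin handleMethodCall:call
//                     result:^(id result) {
//                       // nil indicates success (see -handleMethodCall:result: above).
//                     }];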
| engine/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPlugin.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPlugin.mm",
"repo_id": "engine",
"token_count": 1753
} | 364 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_UIVIEWCONTROLLER_FLUTTERSCREENANDSCENEIFLOADED_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_UIVIEWCONTROLLER_FLUTTERSCREENANDSCENEIFLOADED_H_
#import <UIKit/UIKit.h>
@interface UIViewController (FlutterScreenAndSceneIfLoaded)
/// Returns a UIWindowScene if the UIViewController's view is loaded, and nil otherwise.
- (UIWindowScene*)flutterWindowSceneIfViewLoaded API_AVAILABLE(ios(13.0));
/// Before iOS 13, returns the main screen. On iOS 13 and later, returns the screen the
/// UIViewController is attached to if its view is loaded, and nil otherwise.
- (UIScreen*)flutterScreenIfViewLoaded;
@end
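// Illustration only: a typical call site reads the screen scale without forcing the view to
// load, falling back to the main screen (messaging nil returns 0, so the ?: fallback applies
// when no screen is attached yet):
//
//   CGFloat scale =
//       viewController.flutterScreenIfViewLoaded.scale ?: UIScreen.mainScreen.scale;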
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_UIVIEWCONTROLLER_FLUTTERSCREENANDSCENEIFLOADED_H_
| engine/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.h",
"repo_id": "engine",
"token_count": 352
} | 365 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_PROFILER_METRICS_IOS_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_PROFILER_METRICS_IOS_H_
#include <mach/mach.h>
#include <cassert>
#include <optional>
#include "flutter/fml/logging.h"
#include "flutter/shell/profiling/sampling_profiler.h"
namespace flutter {
/**
* @brief Utility class that gathers profiling metrics used by
* `flutter::SamplingProfiler`.
*
* @see flutter::SamplingProfiler
*/
class ProfilerMetricsIOS {
public:
ProfilerMetricsIOS() = default;
ProfileSample GenerateSample();
private:
std::optional<CpuUsageInfo> CpuUsage();
std::optional<MemoryUsageInfo> MemoryUsage();
FML_DISALLOW_COPY_AND_ASSIGN(ProfilerMetricsIOS);
};
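// Illustration only: the iOS platform view wires this class into the sampling profiler roughly
// as follows. The SamplingProfiler signature shown here is an assumption for the sketch; see
// flutter/shell/profiling/sampling_profiler.h for the actual declaration.
//
//   ProfilerMetricsIOS metrics;
//   SamplingProfiler profiler("io.flutter.profiler", profiler_task_runner,
//                             [&metrics]() { return metrics.GenerateSample(); },
//                             /*num_samples_per_sec=*/5);
//   profiler.Start();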
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_PROFILER_METRICS_IOS_H_
| engine/shell/platform/darwin/ios/framework/Source/profiler_metrics_ios.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/profiler_metrics_ios.h",
"repo_id": "engine",
"token_count": 381
} | 366 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/ios_external_view_embedder.h"
#include "flutter/common/constants.h"
namespace flutter {
IOSExternalViewEmbedder::IOSExternalViewEmbedder(
const std::shared_ptr<FlutterPlatformViewsController>& platform_views_controller,
std::shared_ptr<IOSContext> context)
: platform_views_controller_(platform_views_controller), ios_context_(std::move(context)) {
FML_CHECK(ios_context_);
}
IOSExternalViewEmbedder::~IOSExternalViewEmbedder() = default;
// |ExternalViewEmbedder|
DlCanvas* IOSExternalViewEmbedder::GetRootCanvas() {
// On iOS, the root surface is created from the on-screen render target. Only the surfaces for the
// various overlays are controlled by this class.
return nullptr;
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::CancelFrame() {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::CancelFrame");
FML_CHECK(platform_views_controller_);
platform_views_controller_->CancelFrame();
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::BeginFrame(
GrDirectContext* context,
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::PrepareFlutterView(int64_t flutter_view_id,
SkISize frame_size,
double device_pixel_ratio) {
// TODO(dkwingsmt): This class only supports rendering into the implicit view.
// Properly support multi-view in the future.
FML_DCHECK(flutter_view_id == kFlutterImplicitViewId);
FML_CHECK(platform_views_controller_);
platform_views_controller_->BeginFrame(frame_size);
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::PrerollCompositeEmbeddedView(
int64_t view_id,
std::unique_ptr<EmbeddedViewParams> params) {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::PrerollCompositeEmbeddedView");
FML_CHECK(platform_views_controller_);
platform_views_controller_->PrerollCompositeEmbeddedView(view_id, std::move(params));
}
// |ExternalViewEmbedder|
PostPrerollResult IOSExternalViewEmbedder::PostPrerollAction(
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::PostPrerollAction");
FML_CHECK(platform_views_controller_);
PostPrerollResult result = platform_views_controller_->PostPrerollAction(raster_thread_merger);
return result;
}
// |ExternalViewEmbedder|
DlCanvas* IOSExternalViewEmbedder::CompositeEmbeddedView(int64_t view_id) {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::CompositeEmbeddedView");
FML_CHECK(platform_views_controller_);
return platform_views_controller_->CompositeEmbeddedView(view_id);
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::SubmitFlutterView(
GrDirectContext* context,
const std::shared_ptr<impeller::AiksContext>& aiks_context,
std::unique_ptr<SurfaceFrame> frame) {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::SubmitFlutterView");
FML_CHECK(platform_views_controller_);
platform_views_controller_->SubmitFrame(context, ios_context_, std::move(frame));
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::DidSubmitFrame");
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::EndFrame(
bool should_resubmit_frame,
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {
TRACE_EVENT0("flutter", "IOSExternalViewEmbedder::EndFrame");
platform_views_controller_->EndFrame(should_resubmit_frame, raster_thread_merger);
}
// |ExternalViewEmbedder|
bool IOSExternalViewEmbedder::SupportsDynamicThreadMerging() {
return true;
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::PushFilterToVisitedPlatformViews(
const std::shared_ptr<const DlImageFilter>& filter,
const SkRect& filter_rect) {
platform_views_controller_->PushFilterToVisitedPlatformViews(filter, filter_rect);
}
// |ExternalViewEmbedder|
void IOSExternalViewEmbedder::PushVisitedPlatformView(int64_t view_id) {
platform_views_controller_->PushVisitedPlatformView(view_id);
}
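// Illustration only: for a frame containing embedded platform views, the rasterizer drives this
// embedder roughly in the following order (variable names are placeholders for the sketch):
//
//   embedder->BeginFrame(context, raster_thread_merger);                  // no-op on iOS
//   embedder->PrepareFlutterView(kFlutterImplicitViewId, frame_size, dpr);
//   embedder->PrerollCompositeEmbeddedView(view_id, std::move(params));
//   embedder->PostPrerollAction(raster_thread_merger);
//   DlCanvas* overlay_canvas = embedder->CompositeEmbeddedView(view_id);
//   embedder->SubmitFlutterView(context, aiks_context, std::move(frame));
//   embedder->EndFrame(should_resubmit, raster_thread_merger);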
} // namespace flutter
| engine/shell/platform/darwin/ios/ios_external_view_embedder.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/ios_external_view_embedder.mm",
"repo_id": "engine",
"token_count": 1510
} | 367 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
assert(is_mac)
import("//build/config/mac/mac_sdk.gni")
import("//flutter/build/zip_bundle.gni")
import("//flutter/shell/gpu/gpu.gni")
import("//flutter/shell/platform/darwin/common/framework_common.gni")
import("//flutter/shell/platform/glfw/config.gni")
import("//flutter/testing/testing.gni")
shell_gpu_configuration("macos_gpu_configuration") {
enable_software = true
enable_gl = true
enable_vulkan = false
enable_metal = shell_enable_metal
}
group("macos") {
deps = [ ":flutter_framework" ]
if (build_glfw_shell) {
deps += [
":flutter_macos_glfw",
"//flutter/shell/platform/glfw:publish_headers_glfw",
"//flutter/shell/platform/glfw/client_wrapper:publish_wrapper_glfw",
]
}
}
_flutter_framework_name = "FlutterMacOS"
_flutter_framework_filename = "$_flutter_framework_name.framework"
_flutter_framework_dir = "$root_out_dir/$_flutter_framework_filename"
_framework_binary_subpath = "Versions/A/$_flutter_framework_name"
# The headers that will be copied to the framework and be accessed from outside
# the Flutter engine source root.
_flutter_framework_headers = [
"framework/Headers/FlutterAppDelegate.h",
"framework/Headers/FlutterAppLifecycleDelegate.h",
"framework/Headers/FlutterEngine.h",
"framework/Headers/FlutterMacOS.h",
"framework/Headers/FlutterPlatformViews.h",
"framework/Headers/FlutterPluginMacOS.h",
"framework/Headers/FlutterPluginRegistrarMacOS.h",
"framework/Headers/FlutterViewController.h",
]
_flutter_framework_headers_copy_dir =
"$_flutter_framework_dir/Versions/A/Headers"
source_set("flutter_framework_source") {
visibility = [ ":*" ]
sources = [
"framework/Source/AccessibilityBridgeMac.h",
"framework/Source/AccessibilityBridgeMac.mm",
"framework/Source/FlutterAppDelegate.mm",
"framework/Source/FlutterAppLifecycleDelegate.mm",
"framework/Source/FlutterBackingStore.h",
"framework/Source/FlutterBackingStore.mm",
"framework/Source/FlutterChannelKeyResponder.h",
"framework/Source/FlutterChannelKeyResponder.mm",
"framework/Source/FlutterCompositor.h",
"framework/Source/FlutterCompositor.mm",
"framework/Source/FlutterDartProject.mm",
"framework/Source/FlutterDartProject_Internal.h",
"framework/Source/FlutterDisplayLink.h",
"framework/Source/FlutterDisplayLink.mm",
"framework/Source/FlutterEmbedderKeyResponder.h",
"framework/Source/FlutterEmbedderKeyResponder.mm",
"framework/Source/FlutterEngine.mm",
"framework/Source/FlutterEngine_Internal.h",
"framework/Source/FlutterExternalTexture.h",
"framework/Source/FlutterExternalTexture.mm",
"framework/Source/FlutterKeyPrimaryResponder.h",
"framework/Source/FlutterKeyboardManager.h",
"framework/Source/FlutterKeyboardManager.mm",
"framework/Source/FlutterKeyboardViewDelegate.h",
"framework/Source/FlutterMenuPlugin.h",
"framework/Source/FlutterMenuPlugin.mm",
"framework/Source/FlutterMenuPlugin_Internal.h",
"framework/Source/FlutterMouseCursorPlugin.h",
"framework/Source/FlutterMouseCursorPlugin.mm",
"framework/Source/FlutterMutatorView.h",
"framework/Source/FlutterMutatorView.mm",
"framework/Source/FlutterPlatformNodeDelegateMac.h",
"framework/Source/FlutterPlatformNodeDelegateMac.mm",
"framework/Source/FlutterPlatformViewController.h",
"framework/Source/FlutterPlatformViewController.mm",
"framework/Source/FlutterRenderer.h",
"framework/Source/FlutterRenderer.mm",
"framework/Source/FlutterSurface.h",
"framework/Source/FlutterSurface.mm",
"framework/Source/FlutterSurfaceManager.h",
"framework/Source/FlutterSurfaceManager.mm",
"framework/Source/FlutterTextInputPlugin.h",
"framework/Source/FlutterTextInputPlugin.mm",
"framework/Source/FlutterTextInputSemanticsObject.h",
"framework/Source/FlutterTextInputSemanticsObject.mm",
"framework/Source/FlutterTextureRegistrar.h",
"framework/Source/FlutterTextureRegistrar.mm",
"framework/Source/FlutterThreadSynchronizer.h",
"framework/Source/FlutterThreadSynchronizer.mm",
"framework/Source/FlutterTimeConverter.h",
"framework/Source/FlutterTimeConverter.mm",
"framework/Source/FlutterVSyncWaiter.h",
"framework/Source/FlutterVSyncWaiter.mm",
"framework/Source/FlutterView.h",
"framework/Source/FlutterView.mm",
"framework/Source/FlutterViewController.mm",
"framework/Source/FlutterViewController_Internal.h",
"framework/Source/FlutterViewEngineProvider.h",
"framework/Source/FlutterViewEngineProvider.mm",
"framework/Source/FlutterViewProvider.h",
"framework/Source/KeyCodeMap.g.mm",
]
sources += _flutter_framework_headers
deps = [
":macos_gpu_configuration",
"//flutter/flow:flow",
"//flutter/fml",
"//flutter/shell/platform/common:common_cpp_accessibility",
"//flutter/shell/platform/common:common_cpp_enums",
"//flutter/shell/platform/common:common_cpp_input",
"//flutter/shell/platform/common:common_cpp_switches",
"//flutter/shell/platform/darwin/common:availability_version_check",
"//flutter/shell/platform/darwin/common:framework_common",
"//flutter/shell/platform/darwin/graphics:graphics",
"//flutter/shell/platform/embedder:embedder_as_internal_library",
"//flutter/skia",
]
public_configs = [ "//flutter:config" ]
defines = [
"FLUTTER_FRAMEWORK",
"FLUTTER_ENGINE_NO_PROTOTYPES",
]
cflags_objcc = flutter_cflags_objcc_arc
frameworks = [
"Carbon.framework",
"Cocoa.framework",
"CoreVideo.framework",
"IOSurface.framework",
"Metal.framework",
"QuartzCore.framework",
]
}
shared_library("flutter_framework_dylib") {
visibility = [ ":*" ]
output_name = "$_flutter_framework_name"
ldflags = [
"-Wl,-install_name,@rpath/$_flutter_framework_filename/$_framework_binary_subpath",
"-fapplication-extension",
]
deps = [ ":flutter_framework_source" ]
}
test_fixtures("flutter_desktop_darwin_fixtures") {
dart_main = "framework/Source/fixtures/flutter_desktop_test.dart"
fixtures = [ "//flutter/third_party/icu/common/icudtl.dat" ]
}
executable("flutter_desktop_darwin_unittests") {
testonly = true
sources = [
"framework/Source/AccessibilityBridgeMacTest.mm",
"framework/Source/FlutterAppDelegateTest.mm",
"framework/Source/FlutterAppLifecycleDelegateTest.mm",
"framework/Source/FlutterChannelKeyResponderTest.mm",
"framework/Source/FlutterDisplayLinkTest.mm",
"framework/Source/FlutterEmbedderExternalTextureTest.mm",
"framework/Source/FlutterEmbedderKeyResponderTest.mm",
"framework/Source/FlutterEngineTest.mm",
"framework/Source/FlutterEngineTestUtils.h",
"framework/Source/FlutterEngineTestUtils.mm",
"framework/Source/FlutterKeyboardManagerTest.mm",
"framework/Source/FlutterMenuPluginTest.mm",
"framework/Source/FlutterMutatorViewTest.mm",
"framework/Source/FlutterPlatformNodeDelegateMacTest.mm",
"framework/Source/FlutterPlatformViewControllerTest.mm",
"framework/Source/FlutterSurfaceManagerTest.mm",
"framework/Source/FlutterTextInputPluginTest.mm",
"framework/Source/FlutterTextInputSemanticsObjectTest.mm",
"framework/Source/FlutterThreadSynchronizerTest.mm",
"framework/Source/FlutterVSyncWaiterTest.mm",
"framework/Source/FlutterViewControllerTest.mm",
"framework/Source/FlutterViewControllerTestUtils.h",
"framework/Source/FlutterViewControllerTestUtils.mm",
"framework/Source/FlutterViewEngineProviderTest.mm",
"framework/Source/FlutterViewTest.mm",
"framework/Source/KeyCodeMapTest.mm",
"framework/Source/TestFlutterPlatformView.h",
"framework/Source/TestFlutterPlatformView.mm",
]
cflags_objcc = flutter_cflags_objcc_arc
ldflags = [ "-ObjC" ]
deps = [
":flutter_desktop_darwin_fixtures",
":flutter_framework_source",
"//flutter/shell/platform/common:common_cpp_accessibility",
"//flutter/shell/platform/common:common_cpp_enums",
"//flutter/shell/platform/darwin/common:framework_common",
"//flutter/shell/platform/darwin/graphics",
"//flutter/shell/platform/embedder:embedder_as_internal_library",
"//flutter/shell/platform/embedder:embedder_test_utils",
"//flutter/testing",
"//flutter/testing:autoreleasepool_test",
"//flutter/testing:dart",
"//flutter/testing:skia",
"//flutter/testing:testing_lib",
"//flutter/third_party/ocmock:ocmock",
]
}
copy("copy_dylib") {
visibility = [ ":*" ]
sources = [ "$root_out_dir/lib$_flutter_framework_name.dylib" ]
outputs = [ "$_flutter_framework_dir/$_framework_binary_subpath" ]
deps = [ ":flutter_framework_dylib" ]
}
copy("copy_framework_info_plist") {
visibility = [ ":*" ]
sources = [ "framework/Info.plist" ]
outputs = [ "$_flutter_framework_dir/Versions/A/Resources/Info.plist" ]
}
copy("copy_framework_module_map") {
visibility = [ ":*" ]
sources = [ "framework/module.modulemap" ]
outputs = [ "$_flutter_framework_dir/Versions/A/Modules/module.modulemap" ]
}
action("copy_framework_headers") {
script = "//flutter/sky/tools/install_framework_headers.py"
visibility = [ ":*" ]
sources = get_path_info(_flutter_framework_headers, "abspath") +
framework_common_headers
outputs = []
foreach(header, sources) {
header_basename = get_path_info(header, "file")
outputs += [ "$_flutter_framework_headers_copy_dir/$header_basename" ]
}
args = [
"--location",
rebase_path("$_flutter_framework_headers_copy_dir"),
"--headers",
] + rebase_path(sources, "", "//")
}
copy("copy_framework_icu") {
visibility = [ ":*" ]
sources = [ "//flutter/third_party/icu/flutter/icudtl.dat" ]
outputs =
[ "$_flutter_framework_dir/Versions/A/Resources/{{source_file_part}}" ]
}
copy("copy_license") {
visibility = [ ":*" ]
sources = [ "//LICENSE" ]
outputs = [ "$root_out_dir/LICENSE" ]
}
action("_generate_symlinks") {
visibility = [ ":*" ]
script = "//build/config/mac/package_framework.py"
outputs = [
"$root_build_dir/$_flutter_framework_name.stamp",
"$root_out_dir/$_flutter_framework_filename",
]
args = [
"--framework",
"$_flutter_framework_filename",
"--version",
"A",
"--contents",
"$_flutter_framework_name",
"Resources",
"Headers",
"Modules",
"--stamp",
"$_flutter_framework_name.stamp",
]
deps = [
":copy_dylib",
":copy_framework_headers",
":copy_framework_icu",
":copy_framework_info_plist",
":copy_framework_module_map",
":copy_license",
]
metadata = {
macos_framework_without_entitlement =
[ "FlutterMacOS.framework.zip/Versions/A/FlutterMacOS" ]
}
}
group("universal_flutter_framework") {
visibility = [ ":*" ]
deps = [
":_generate_symlinks",
":_generate_symlinks_and_verify_framework_module",
]
}
action("flutter_framework") {
script = "//flutter/sky/tools/create_xcframework.py"
outputs = [ "$root_out_dir/FlutterMacOS.xcframework" ]
args = [
"--frameworks",
rebase_path(_flutter_framework_dir),
"--name",
"FlutterMacOS",
"--location",
rebase_path(root_out_dir),
]
deps = [ ":universal_flutter_framework" ]
}
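# Illustration only: with an engine checkout configured for a host (macOS) build, this target can
# be built roughly as follows. The output directory name is an example and depends on the gn
# arguments used.
#
#   ./flutter/tools/gn --unoptimized
#   ninja -C out/host_debug_unopt flutter/shell/platform/darwin/macos:flutter_framework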
if (build_glfw_shell) {
shared_library("flutter_macos_glfw") {
deps = [ "//flutter/shell/platform/glfw:flutter_glfw" ]
public_configs = [ "//flutter:config" ]
}
}
zip_bundle("zip_macos_flutter_framework") {
deps = [
":_generate_symlinks",
":macos_framework_without_entitlement_config",
]
prefix = "$full_platform_name-$flutter_runtime_mode/"
if (flutter_runtime_mode == "debug") {
prefix = "$full_platform_name/"
}
output = "${prefix}FlutterMacOS.framework.zip"
visibility = [ ":*" ]
files = [
{
source = "$root_out_dir/FlutterMacOS.framework"
destination = "FlutterMacOS.framework"
},
]
}
generated_file("macos_framework_without_entitlement_config") {
outputs = [ "$target_gen_dir/framework_without_entitlements.txt" ]
data_keys = [ "macos_framework_without_entitlement" ]
deps = [ ":_generate_symlinks" ]
}
shared_library("_generate_symlinks_and_verify_framework_module") {
framework_search_path = rebase_path("$root_out_dir")
visibility = [ ":*" ]
cflags_objc = [ "-F$framework_search_path" ]
ldflags = [
"-F$framework_search_path",
"-fapplication-extension",
"-Xlinker",
"-fatal_warnings",
]
frameworks = [ "FlutterMacOS.framework" ]
sources = [ "framework/Source/FlutterUmbrellaImportTests.m" ]
deps = [ ":_generate_symlinks" ]
}
| engine/shell/platform/darwin/macos/BUILD.gn/0 | {
"file_path": "engine/shell/platform/darwin/macos/BUILD.gn",
"repo_id": "engine",
"token_count": 4813
} | 368 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERAPPDELEGATE_INTERNAL_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERAPPDELEGATE_INTERNAL_H_
#import "flutter/shell/platform/darwin/macos/framework/Headers/FlutterAppDelegate.h"
#import "flutter/shell/platform/darwin/macos/framework/Source/FlutterEngine_Internal.h"
@interface FlutterAppDelegate ()
/**
* Holds a weak reference to the termination handler owned by the engine.
* Called by the |FlutterApplication| when termination is requested by the OS.
*/
@property(readwrite, nullable, weak) FlutterEngineTerminationHandler* terminationHandler;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERAPPDELEGATE_INTERNAL_H_
| engine/shell/platform/darwin/macos/framework/Source/FlutterAppDelegate_Internal.h/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterAppDelegate_Internal.h",
"repo_id": "engine",
"token_count": 314
} | 369 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import <Foundation/Foundation.h>
#import <Metal/Metal.h>
#include <memory>
#include <vector>
#import "flutter/display_list/skia/dl_sk_canvas.h"
#import "flutter/shell/platform/darwin/graphics/FlutterDarwinContextMetalSkia.h"
#import "flutter/shell/platform/darwin/graphics/FlutterDarwinExternalTextureMetal.h"
#import "flutter/shell/platform/darwin/macos/framework/Source/FlutterExternalTexture.h"
#include "flutter/shell/platform/embedder/embedder.h"
#include "flutter/shell/platform/embedder/embedder_external_texture_metal.h"
#include "flutter/testing/autoreleasepool_test.h"
#include "flutter/testing/testing.h"
#include "third_party/googletest/googletest/include/gtest/gtest.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkSamplingOptions.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h"
@interface TestExternalTexture : NSObject <FlutterTexture>
- (nonnull instancetype)initWidth:(size_t)width
height:(size_t)height
pixelFormatType:(OSType)pixelFormatType;
@end
@implementation TestExternalTexture {
size_t _width;
size_t _height;
OSType _pixelFormatType;
}
- (nonnull instancetype)initWidth:(size_t)width
height:(size_t)height
pixelFormatType:(OSType)pixelFormatType {
if (self = [super init]) {
_width = width;
_height = height;
_pixelFormatType = pixelFormatType;
}
return self;
}
- (CVPixelBufferRef)copyPixelBuffer {
return [self pixelBuffer];
}
- (CVPixelBufferRef)pixelBuffer {
NSDictionary* options = @{
    // This key is required to generate an SkPicture from a CVPixelBufferRef with Metal.
(NSString*)kCVPixelBufferMetalCompatibilityKey : @YES
};
CVPixelBufferRef pxbuffer = NULL;
  CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, _width, _height, _pixelFormatType,
(__bridge CFDictionaryRef)options, &pxbuffer);
NSAssert(status == kCVReturnSuccess && pxbuffer != NULL, @"Failed to create pixel buffer.");
return pxbuffer;
}
@end
namespace flutter::testing {
// Test-specific name for AutoreleasePoolTest fixture.
using FlutterEmbedderExternalTextureTest = AutoreleasePoolTest;
TEST_F(FlutterEmbedderExternalTextureTest, TestTextureResolution) {
// Constants.
const size_t width = 100;
const size_t height = 100;
const int64_t texture_id = 1;
// Set up the surface.
FlutterDarwinContextMetalSkia* darwinContextMetal =
[[FlutterDarwinContextMetalSkia alloc] initWithDefaultMTLDevice];
SkImageInfo info = SkImageInfo::MakeN32Premul(width, height);
GrDirectContext* grContext = darwinContextMetal.mainContext.get();
sk_sp<SkSurface> gpuSurface(SkSurfaces::RenderTarget(grContext, skgpu::Budgeted::kNo, info));
// Create a texture.
MTLTextureDescriptor* textureDescriptor = [[MTLTextureDescriptor alloc] init];
textureDescriptor.pixelFormat = MTLPixelFormatBGRA8Unorm;
textureDescriptor.width = width;
textureDescriptor.height = height;
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
id<MTLTexture> mtlTexture =
[darwinContextMetal.device newTextureWithDescriptor:textureDescriptor];
std::vector<FlutterMetalTextureHandle> textures = {
(__bridge FlutterMetalTextureHandle)mtlTexture,
};
// Callback to resolve the texture.
EmbedderExternalTextureMetal::ExternalTextureCallback callback = [&](int64_t texture_id, size_t w,
size_t h) {
EXPECT_TRUE(w == width);
EXPECT_TRUE(h == height);
auto texture = std::make_unique<FlutterMetalExternalTexture>();
texture->struct_size = sizeof(FlutterMetalExternalTexture);
texture->num_textures = 1;
texture->height = h;
texture->width = w;
texture->pixel_format = FlutterMetalExternalTexturePixelFormat::kRGBA;
texture->textures = textures.data();
return texture;
};
// Render the texture.
std::unique_ptr<flutter::Texture> texture =
std::make_unique<EmbedderExternalTextureMetal>(texture_id, callback);
SkRect bounds = SkRect::MakeWH(info.width(), info.height());
DlImageSampling sampling = DlImageSampling::kNearestNeighbor;
DlSkCanvasAdapter canvas(gpuSurface->getCanvas());
flutter::Texture::PaintContext context{
.canvas = &canvas,
.gr_context = grContext,
};
texture->Paint(context, bounds, /*freeze=*/false, sampling);
ASSERT_TRUE(mtlTexture != nil);
gpuSurface->makeImageSnapshot();
}
TEST_F(FlutterEmbedderExternalTextureTest, TestPopulateExternalTexture) {
// Constants.
const size_t width = 100;
const size_t height = 100;
const int64_t texture_id = 1;
// Set up the surface.
FlutterDarwinContextMetalSkia* darwinContextMetal =
[[FlutterDarwinContextMetalSkia alloc] initWithDefaultMTLDevice];
SkImageInfo info = SkImageInfo::MakeN32Premul(width, height);
GrDirectContext* grContext = darwinContextMetal.mainContext.get();
sk_sp<SkSurface> gpuSurface(SkSurfaces::RenderTarget(grContext, skgpu::Budgeted::kNo, info));
// Create a texture.
TestExternalTexture* testExternalTexture =
[[TestExternalTexture alloc] initWidth:width
height:height
pixelFormatType:kCVPixelFormatType_32BGRA];
FlutterExternalTexture* textureHolder =
[[FlutterExternalTexture alloc] initWithFlutterTexture:testExternalTexture
darwinMetalContext:darwinContextMetal];
// Callback to resolve the texture.
EmbedderExternalTextureMetal::ExternalTextureCallback callback = [&](int64_t texture_id, size_t w,
size_t h) {
EXPECT_TRUE(w == width);
EXPECT_TRUE(h == height);
auto texture = std::make_unique<FlutterMetalExternalTexture>();
[textureHolder populateTexture:texture.get()];
EXPECT_TRUE(texture->num_textures == 1);
EXPECT_TRUE(texture->textures != nullptr);
EXPECT_TRUE(texture->pixel_format == FlutterMetalExternalTexturePixelFormat::kRGBA);
return texture;
};
// Render the texture.
std::unique_ptr<flutter::Texture> texture =
std::make_unique<EmbedderExternalTextureMetal>(texture_id, callback);
SkRect bounds = SkRect::MakeWH(info.width(), info.height());
DlImageSampling sampling = DlImageSampling::kNearestNeighbor;
DlSkCanvasAdapter canvas(gpuSurface->getCanvas());
flutter::Texture::PaintContext context{
.canvas = &canvas,
.gr_context = grContext,
};
texture->Paint(context, bounds, /*freeze=*/false, sampling);
gpuSurface->makeImageSnapshot();
}
TEST_F(FlutterEmbedderExternalTextureTest, TestPopulateExternalTextureYUVA) {
// Constants.
const size_t width = 100;
const size_t height = 100;
const int64_t texture_id = 1;
// Set up the surface.
FlutterDarwinContextMetalSkia* darwinContextMetal =
[[FlutterDarwinContextMetalSkia alloc] initWithDefaultMTLDevice];
SkImageInfo info = SkImageInfo::MakeN32Premul(width, height);
GrDirectContext* grContext = darwinContextMetal.mainContext.get();
sk_sp<SkSurface> gpuSurface(SkSurfaces::RenderTarget(grContext, skgpu::Budgeted::kNo, info));
// Create a texture.
TestExternalTexture* testExternalTexture =
[[TestExternalTexture alloc] initWidth:width
height:height
pixelFormatType:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
FlutterExternalTexture* textureHolder =
[[FlutterExternalTexture alloc] initWithFlutterTexture:testExternalTexture
darwinMetalContext:darwinContextMetal];
// Callback to resolve the texture.
EmbedderExternalTextureMetal::ExternalTextureCallback callback = [&](int64_t texture_id, size_t w,
size_t h) {
EXPECT_TRUE(w == width);
EXPECT_TRUE(h == height);
auto texture = std::make_unique<FlutterMetalExternalTexture>();
[textureHolder populateTexture:texture.get()];
EXPECT_TRUE(texture->num_textures == 2);
EXPECT_TRUE(texture->textures != nullptr);
EXPECT_TRUE(texture->pixel_format == FlutterMetalExternalTexturePixelFormat::kYUVA);
EXPECT_TRUE(texture->yuv_color_space ==
FlutterMetalExternalTextureYUVColorSpace::kBT601LimitedRange);
return texture;
};
// Render the texture.
std::unique_ptr<flutter::Texture> texture =
std::make_unique<EmbedderExternalTextureMetal>(texture_id, callback);
SkRect bounds = SkRect::MakeWH(info.width(), info.height());
DlImageSampling sampling = DlImageSampling::kNearestNeighbor;
DlSkCanvasAdapter canvas(gpuSurface->getCanvas());
flutter::Texture::PaintContext context{
.canvas = &canvas,
.gr_context = grContext,
};
texture->Paint(context, bounds, /*freeze=*/false, sampling);
gpuSurface->makeImageSnapshot();
}
TEST_F(FlutterEmbedderExternalTextureTest, TestPopulateExternalTextureYUVA2) {
// Constants.
const size_t width = 100;
const size_t height = 100;
const int64_t texture_id = 1;
// Set up the surface.
FlutterDarwinContextMetalSkia* darwinContextMetal =
[[FlutterDarwinContextMetalSkia alloc] initWithDefaultMTLDevice];
SkImageInfo info = SkImageInfo::MakeN32Premul(width, height);
GrDirectContext* grContext = darwinContextMetal.mainContext.get();
sk_sp<SkSurface> gpuSurface(SkSurfaces::RenderTarget(grContext, skgpu::Budgeted::kNo, info));
// Create a texture.
TestExternalTexture* testExternalTexture =
[[TestExternalTexture alloc] initWidth:width
height:height
pixelFormatType:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
FlutterExternalTexture* textureHolder =
[[FlutterExternalTexture alloc] initWithFlutterTexture:testExternalTexture
darwinMetalContext:darwinContextMetal];
// Callback to resolve the texture.
EmbedderExternalTextureMetal::ExternalTextureCallback callback = [&](int64_t texture_id, size_t w,
size_t h) {
EXPECT_TRUE(w == width);
EXPECT_TRUE(h == height);
auto texture = std::make_unique<FlutterMetalExternalTexture>();
[textureHolder populateTexture:texture.get()];
EXPECT_TRUE(texture->num_textures == 2);
EXPECT_TRUE(texture->textures != nullptr);
EXPECT_TRUE(texture->pixel_format == FlutterMetalExternalTexturePixelFormat::kYUVA);
EXPECT_TRUE(texture->yuv_color_space ==
FlutterMetalExternalTextureYUVColorSpace::kBT601FullRange);
return texture;
};
// Render the texture.
std::unique_ptr<flutter::Texture> texture =
std::make_unique<EmbedderExternalTextureMetal>(texture_id, callback);
SkRect bounds = SkRect::MakeWH(info.width(), info.height());
DlImageSampling sampling = DlImageSampling::kNearestNeighbor;
DlSkCanvasAdapter canvas(gpuSurface->getCanvas());
flutter::Texture::PaintContext context{
.canvas = &canvas,
.gr_context = grContext,
};
texture->Paint(context, bounds, /*freeze=*/false, sampling);
gpuSurface->makeImageSnapshot();
}
} // namespace flutter::testing
| engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderExternalTextureTest.mm/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderExternalTextureTest.mm",
"repo_id": "engine",
"token_count": 4376
} | 370 |