text
stringlengths 6
13.6M
| id
stringlengths 13
176
| metadata
dict | __index_level_0__
int64 0
1.69k
|
---|---|---|---|
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import '../vector_math.dart';
import 'canvaskit_api.dart';
import 'path.dart';
/// An error related to the CanvasKit rendering backend.
/// An error raised by the CanvasKit rendering backend.
class CanvasKitError extends Error {
  CanvasKitError(this.message);

  /// Human-readable explanation of what went wrong.
  final String message;

  @override
  String toString() {
    return 'CanvasKitError: $message';
  }
}
/// Creates a new color array.
/// Creates a new color array.
///
/// Converts the 0-255 integer channels of [color] into a 4-entry
/// [Float32List] of 0.0-1.0 values ordered R, G, B, A.
Float32List makeFreshSkColor(ui.Color color) {
  return Float32List(4)
    ..[0] = color.red / 255.0
    ..[1] = color.green / 255.0
    ..[2] = color.blue / 255.0
    ..[3] = color.alpha / 255.0;
}
/// Converts a CanvasKit [SkTextPosition] into a framework [ui.TextPosition].
ui.TextPosition fromPositionWithAffinity(SkTextPosition positionWithAffinity) {
  // JS interop surfaces these numbers as doubles; convert them back to ints.
  final int affinityIndex = positionWithAffinity.affinity.value.toInt();
  return ui.TextPosition(
    offset: positionWithAffinity.pos.toInt(),
    affinity: ui.TextAffinity.values[affinityIndex],
  );
}
/// Shadow flag constants derived from Skia's SkShadowFlags.h.
///
/// These values are bitwise-OR'ed together and passed as the `flags`
/// argument of `SkCanvas.drawShadow`.
class SkiaShadowFlags {
  /// The occluding object is opaque, making the part of the shadow under the
  /// occluder invisible. This allows some optimizations because some parts of
  /// the shadow do not need to be accurate.
  static const int kNone_ShadowFlag = 0x00;

  /// The occluding object is not opaque, making the part of the shadow under the
  /// occluder visible. This requires that the shadow is rendered more accurately
  /// and therefore is slightly more expensive.
  static const int kTransparentOccluder_ShadowFlag = 0x01;

  /// Light position represents a direction, light radius is blur radius at
  /// elevation 1.
  ///
  /// This makes the shadow to have a fixed position relative to the shape that
  /// casts it.
  static const int kDirectionalLight_ShadowFlag = 0x04;

  /// Complete value for the `flags` argument for opaque occluder.
  static const int kDefaultShadowFlags =
      kDirectionalLight_ShadowFlag | kNone_ShadowFlag;

  /// Complete value for the `flags` argument for transparent occluder.
  static const int kTransparentOccluderShadowFlags =
      kDirectionalLight_ShadowFlag | kTransparentOccluder_ShadowFlag;
}
// These numbers have been chosen empirically to give a result closest to the
// material spec.

// Alpha applied to the shadow color for the ambient shadow component.
const double ckShadowAmbientAlpha = 0.039;
// Alpha applied to the shadow color for the spot shadow component.
const double ckShadowSpotAlpha = 0.25;
// Position of the virtual light source relative to the occluder.
const double ckShadowLightXOffset = 0;
const double ckShadowLightYOffset = -450;
const double ckShadowLightHeight = 600;
const double ckShadowLightRadius = 800;
// Tangents of the light direction; used to derive the spot shadow offset
// per unit of elevation.
const double ckShadowLightXTangent = ckShadowLightXOffset / ckShadowLightHeight;
const double ckShadowLightYTangent = ckShadowLightYOffset / ckShadowLightHeight;
/// Computes the smallest rectangle that contains the shadow.
// Most of this logic is borrowed from SkDrawShadowInfo.cpp in Skia.
// TODO(yjbanov): switch to SkDrawShadowMetrics::GetLocalBounds when available
// See:
//  - https://bugs.chromium.org/p/skia/issues/detail?id=11146
//  - https://github.com/flutter/flutter/issues/73492
ui.Rect computeSkShadowBounds(
  CkPath path,
  double elevation,
  double devicePixelRatio,
  Matrix4 matrix,
) {
  ui.Rect pathBounds = path.getBounds();

  // With zero elevation there is no shadow; the shape's own bounds suffice.
  if (elevation == 0) {
    return pathBounds;
  }

  // For visual correctness the shadow offset and blur does not change with
  // parent transforms. Therefore, in general case we have to first transform
  // the shape bounds to device coordinates, then compute the shadow bounds,
  // then transform the bounds back to local coordinates. However, if the
  // transform is an identity or translation (a common case), we can skip this
  // step. With directional lighting translation does not affect the size or
  // shape of the shadow. Skipping this step saves us two transformRects and
  // one matrix inverse.
  final bool isComplex = !matrix.isIdentityOrTranslation();
  if (isComplex) {
    pathBounds = matrix.transformRect(pathBounds);
  }

  double left = pathBounds.left;
  double top = pathBounds.top;
  double right = pathBounds.right;
  double bottom = pathBounds.bottom;

  // Blur radii and spot offset grow with elevation; see the light constants
  // declared above.
  final double ambientBlur = ambientBlurRadius(elevation);
  final double spotBlur = ckShadowLightRadius * elevation;
  final double spotOffsetX = -elevation * ckShadowLightXTangent;
  final double spotOffsetY = -elevation * ckShadowLightYTangent;

  // Inflate the bounds by the spot offset plus both blurs on each side.
  // The extra +1/-1 are to cover possible floating point errors.
  left = left - 1 + (spotOffsetX - ambientBlur - spotBlur) * devicePixelRatio;
  top = top - 1 + (spotOffsetY - ambientBlur - spotBlur) * devicePixelRatio;
  right = right + 1 + (spotOffsetX + ambientBlur + spotBlur) * devicePixelRatio;
  bottom =
      bottom + 1 + (spotOffsetY + ambientBlur + spotBlur) * devicePixelRatio;

  final ui.Rect shadowBounds = ui.Rect.fromLTRB(left, top, right, bottom);

  if (isComplex) {
    // Map the device-space shadow bounds back into local coordinates.
    final Matrix4 inverse = Matrix4.zero();
    // The inverse only makes sense if the determinant is non-zero.
    if (inverse.copyInverse(matrix) != 0.0) {
      return inverse.transformRect(shadowBounds);
    } else {
      return shadowBounds;
    }
  } else {
    return shadowBounds;
  }
}
// Scaling factors used to derive the ambient blur radius from elevation.
const double kAmbientHeightFactor = 1.0 / 128.0;
const double kAmbientGeomFactor = 64.0;
// Ambient blur saturates at the radius produced by an elevation of 300.
const double kMaxAmbientRadius =
    300 * kAmbientHeightFactor * kAmbientGeomFactor;
/// Returns the ambient blur radius for an occluder at elevation [height],
/// clamped to [kMaxAmbientRadius].
double ambientBlurRadius(double height) {
  final double unclamped = height * kAmbientHeightFactor * kAmbientGeomFactor;
  return math.min(unclamped, kMaxAmbientRadius);
}
/// Draws an elevation shadow for [path] onto [skCanvas] using Skia's tonal
/// shadow API.
void drawSkShadow(
  SkCanvas skCanvas,
  CkPath path,
  ui.Color color,
  double elevation,
  bool transparentOccluder,
  double devicePixelRatio,
) {
  int flags = transparentOccluder
      ? SkiaShadowFlags.kTransparentOccluderShadowFlags
      : SkiaShadowFlags.kDefaultShadowFlags;
  // NOTE(review): both composite flag values above already include the
  // directional-light bit, so this OR appears redundant (but harmless).
  flags |= SkiaShadowFlags.kDirectionalLight_ShadowFlag;

  // Derive the ambient and spot input colors from the requested color using
  // the empirically chosen alphas, then let CanvasKit compute tonal colors.
  final ui.Color inAmbient =
      color.withAlpha((color.alpha * ckShadowAmbientAlpha).round());
  final ui.Color inSpot =
      color.withAlpha((color.alpha * ckShadowSpotAlpha).round());
  final SkTonalColors inTonalColors = SkTonalColors(
    ambient: makeFreshSkColor(inAmbient),
    spot: makeFreshSkColor(inSpot),
  );
  final SkTonalColors tonalColors = canvasKit.computeTonalColors(inTonalColors);

  skCanvas.drawShadow(
    path.skiaObject,
    // Z-plane: the occluder sits at a constant height of dpr * elevation.
    Float32List(3)..[2] = devicePixelRatio * elevation,
    // Light direction vector (x, y, z).
    Float32List(3)
      ..[0] = 0
      ..[1] = -1
      ..[2] = 1,
    ckShadowLightRadius / ckShadowLightHeight,
    tonalColors.ambient,
    tonalColors.spot,
    // CanvasKit expects the flag bits as a JS number.
    flags.toDouble(),
  );
}
| engine/lib/web_ui/lib/src/engine/canvaskit/util.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/util.dart",
"repo_id": "engine",
"token_count": 2186
} | 286 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import '../picture.dart';
import '../util.dart';
import '../validators.dart';
import '../vector_math.dart';
import 'painting.dart';
import 'recording_canvas.dart';
import 'render_vertices.dart';
/// An HTML-renderer implementation of [ui.Canvas] that records drawing
/// commands into a [RecordingCanvas] for later playback.
///
/// Most methods simply validate their arguments and delegate to [_canvas].
class SurfaceCanvas implements ui.Canvas {
  factory SurfaceCanvas(EnginePictureRecorder recorder, [ui.Rect? cullRect]) {
    if (recorder.isRecording) {
      throw ArgumentError(
          '"recorder" must not already be associated with another Canvas.');
    }
    cullRect ??= ui.Rect.largest;
    return SurfaceCanvas._(recorder.beginRecording(cullRect));
  }

  SurfaceCanvas._(this._canvas);

  // The underlying recording target that all draw calls are forwarded to.
  RecordingCanvas _canvas;

  @override
  void save() {
    _canvas.save();
  }

  @override
  void saveLayer(ui.Rect? bounds, ui.Paint paint) {
    // A null bounds means an unbounded layer; validate only when present.
    if (bounds == null) {
      _saveLayerWithoutBounds(paint);
    } else {
      assert(rectIsValid(bounds));
      _saveLayer(bounds, paint);
    }
  }

  void _saveLayerWithoutBounds(ui.Paint paint) {
    _canvas.saveLayerWithoutBounds(paint as SurfacePaint);
  }

  void _saveLayer(ui.Rect bounds, ui.Paint paint) {
    _canvas.saveLayer(bounds, paint as SurfacePaint);
  }

  @override
  void restore() {
    _canvas.restore();
  }

  @override
  void restoreToCount(int count) {
    _canvas.restoreToCount(count);
  }

  @override
  int getSaveCount() => _canvas.saveCount;

  @override
  void translate(double dx, double dy) {
    _canvas.translate(dx, dy);
  }

  @override
  void scale(double sx, [double? sy]) => _scale(sx, sy ?? sx);

  void _scale(double sx, double sy) {
    _canvas.scale(sx, sy);
  }

  @override
  void rotate(double radians) {
    _canvas.rotate(radians);
  }

  @override
  void skew(double sx, double sy) {
    _canvas.skew(sx, sy);
  }

  @override
  void transform(Float64List matrix4) {
    if (matrix4.length != 16) {
      throw ArgumentError('"matrix4" must have 16 entries.');
    }
    // The recording canvas stores matrices as 32-bit floats.
    _transform(toMatrix32(matrix4));
  }

  void _transform(Float32List matrix4) {
    _canvas.transform(matrix4);
  }

  @override
  Float64List getTransform() {
    return Float64List.fromList(_canvas.getCurrentMatrixUnsafe());
  }

  @override
  void clipRect(ui.Rect rect,
      {ui.ClipOp clipOp = ui.ClipOp.intersect, bool doAntiAlias = true}) {
    assert(rectIsValid(rect));
    _clipRect(rect, clipOp, doAntiAlias);
  }

  // NOTE(review): doAntiAlias is accepted but not forwarded here — the HTML
  // recording canvas clipRect takes no anti-alias flag.
  void _clipRect(ui.Rect rect, ui.ClipOp clipOp, bool doAntiAlias) {
    _canvas.clipRect(rect, clipOp);
  }

  @override
  void clipRRect(ui.RRect rrect, {bool doAntiAlias = true}) {
    assert(rrectIsValid(rrect));
    _clipRRect(rrect, doAntiAlias);
  }

  void _clipRRect(ui.RRect rrect, bool doAntiAlias) {
    _canvas.clipRRect(rrect);
  }

  @override
  void clipPath(ui.Path path, {bool doAntiAlias = true}) {
    _clipPath(path, doAntiAlias);
  }

  void _clipPath(ui.Path path, bool doAntiAlias) {
    _canvas.clipPath(path, doAntiAlias: doAntiAlias);
  }

  @override
  ui.Rect getDestinationClipBounds() {
    return _canvas.getDestinationClipBounds() ?? ui.Rect.largest;
  }

  // Expands a rect outward to integer boundaries.
  ui.Rect _roundOut(ui.Rect rect) {
    return ui.Rect.fromLTRB(
      rect.left.floorToDouble(),
      rect.top.floorToDouble(),
      rect.right.ceilToDouble(),
      rect.bottom.ceilToDouble(),
    );
  }

  @override
  ui.Rect getLocalClipBounds() {
    final ui.Rect? destBounds = _canvas.getDestinationClipBounds();
    if (destBounds == null) {
      return ui.Rect.largest;
    }
    // Map the device-space clip back into local space via the inverse of the
    // current transform.
    final Matrix4 transform = Matrix4.fromFloat32List(_canvas.getCurrentMatrixUnsafe());
    if (transform.invert() == 0) {
      // non-invertible transforms collapse space to a line or point
      return ui.Rect.zero;
    }
    return transform.transformRect(_roundOut(destBounds));
  }

  @override
  void drawColor(ui.Color color, ui.BlendMode blendMode) {
    _drawColor(color, blendMode);
  }

  void _drawColor(ui.Color color, ui.BlendMode blendMode) {
    _canvas.drawColor(color, blendMode);
  }

  @override
  void drawLine(ui.Offset p1, ui.Offset p2, ui.Paint paint) {
    assert(offsetIsValid(p1));
    assert(offsetIsValid(p2));
    _drawLine(p1, p2, paint);
  }

  void _drawLine(ui.Offset p1, ui.Offset p2, ui.Paint paint) {
    _canvas.drawLine(p1, p2, paint as SurfacePaint);
  }

  @override
  void drawPaint(ui.Paint paint) {
    _drawPaint(paint);
  }

  void _drawPaint(ui.Paint paint) {
    _canvas.drawPaint(paint as SurfacePaint);
  }

  @override
  void drawRect(ui.Rect rect, ui.Paint paint) {
    assert(rectIsValid(rect));
    _drawRect(rect, paint);
  }

  void _drawRect(ui.Rect rect, ui.Paint paint) {
    _canvas.drawRect(rect, paint as SurfacePaint);
  }

  @override
  void drawRRect(ui.RRect rrect, ui.Paint paint) {
    assert(rrectIsValid(rrect));
    _drawRRect(rrect, paint);
  }

  void _drawRRect(ui.RRect rrect, ui.Paint paint) {
    _canvas.drawRRect(rrect, paint as SurfacePaint);
  }

  @override
  void drawDRRect(ui.RRect outer, ui.RRect inner, ui.Paint paint) {
    assert(rrectIsValid(outer));
    assert(rrectIsValid(inner));
    _drawDRRect(outer, inner, paint);
  }

  void _drawDRRect(ui.RRect outer, ui.RRect inner, ui.Paint paint) {
    _canvas.drawDRRect(outer, inner, paint as SurfacePaint);
  }

  @override
  void drawOval(ui.Rect rect, ui.Paint paint) {
    assert(rectIsValid(rect));
    _drawOval(rect, paint);
  }

  void _drawOval(ui.Rect rect, ui.Paint paint) {
    _canvas.drawOval(rect, paint as SurfacePaint);
  }

  @override
  void drawCircle(ui.Offset c, double radius, ui.Paint paint) {
    assert(offsetIsValid(c));
    _drawCircle(c, radius, paint);
  }

  void _drawCircle(ui.Offset c, double radius, ui.Paint paint) {
    _canvas.drawCircle(c, radius, paint as SurfacePaint);
  }

  @override
  void drawArc(ui.Rect rect, double startAngle, double sweepAngle,
      bool useCenter, ui.Paint paint) {
    assert(rectIsValid(rect));
    const double pi = math.pi;
    const double pi2 = 2.0 * pi;

    // Build the arc as a path. Sweeps larger than a full circle are emitted
    // as successive half-circle arcTo segments because arcTo cannot express
    // more than 2*pi in a single call.
    final ui.Path path = ui.Path();
    if (useCenter) {
      // Start at the center of the oval so the wedge edges are drawn.
      path.moveTo(
          (rect.left + rect.right) / 2.0, (rect.top + rect.bottom) / 2.0);
    }
    bool forceMoveTo = !useCenter;
    // Unwind one excess negative revolution.
    if (sweepAngle <= -pi2) {
      path.arcTo(rect, startAngle, -pi, forceMoveTo);
      startAngle -= pi;
      path.arcTo(rect, startAngle, -pi, false);
      startAngle -= pi;
      forceMoveTo = false;
      sweepAngle += pi2;
    }
    // Unwind excess positive revolutions.
    while (sweepAngle >= pi2) {
      path.arcTo(rect, startAngle, pi, forceMoveTo);
      startAngle += pi;
      path.arcTo(rect, startAngle, pi, false);
      startAngle += pi;
      forceMoveTo = false;
      sweepAngle -= pi2;
    }
    // Emit the remaining partial sweep.
    path.arcTo(rect, startAngle, sweepAngle, forceMoveTo);
    if (useCenter) {
      path.close();
    }
    _canvas.drawPath(path, paint as SurfacePaint);
  }

  @override
  void drawPath(ui.Path path, ui.Paint paint) {
    _drawPath(path, paint);
  }

  void _drawPath(ui.Path path, ui.Paint paint) {
    _canvas.drawPath(path, paint as SurfacePaint);
  }

  @override
  void drawImage(ui.Image image, ui.Offset offset, ui.Paint paint) {
    assert(offsetIsValid(offset));
    _drawImage(image, offset, paint);
  }

  void _drawImage(ui.Image image, ui.Offset p, ui.Paint paint) {
    _canvas.drawImage(image, p, paint as SurfacePaint);
  }

  @override
  void drawImageRect(ui.Image image, ui.Rect src, ui.Rect dst, ui.Paint paint) {
    assert(rectIsValid(src));
    assert(rectIsValid(dst));
    _drawImageRect(image, src, dst, paint);
  }

  void _drawImageRect(
      ui.Image image, ui.Rect src, ui.Rect dst, ui.Paint paint) {
    _canvas.drawImageRect(image, src, dst, paint as SurfacePaint);
  }

  // Return a list of slice coordinates based on the size of the nine-slice parameters in
  // one dimension. Each set of slice coordinates contains a begin/end pair for each of the
  // source (image) and dest (screen) in the order (src0, dst0, src1, dst1).
  // The area from src0 => src1 of the image is painted on the screen from dst0 => dst1
  // The slices for each dimension are generated independently.
  List<double> _initSlices(double img0, double imgC0, double imgC1, double img1, double dst0, double dst1) {
    final double imageDim = img1 - img0;
    final double destDim = dst1 - dst0;

    if (imageDim == destDim) {
      // If the src and dest are the same size then we do not need scaling
      // We return 4 values for a single slice
      return <double>[ img0, dst0, img1, dst1 ];
    }

    final double edge0Dim = imgC0 - img0;
    final double edge1Dim = img1 - imgC1;
    final double edgesDim = edge0Dim + edge1Dim;

    if (edgesDim >= destDim) {
      // the center portion has disappeared, leaving only the edges to scale to a common
      // center position in the destination
      // this produces only 2 slices which is 8 values
      final double dstC = dst0 + destDim * edge0Dim / edgesDim;
      return <double>[
        img0, dst0, imgC0, dstC,
        imgC1, dstC, img1, dst1,
      ];
    }

    // center portion is nonEmpty and only that part is scaled
    // we need 3 slices which is 12 values
    final double dstC0 = dst0 + edge0Dim;
    final double dstC1 = dst1 - edge1Dim;
    return <double>[
      img0, dst0, imgC0, dstC0,
      imgC0, dstC0, imgC1, dstC1,
      imgC1, dstC1, img1, dst1
    ];
  }

  @override
  void drawImageNine(
      ui.Image image, ui.Rect center, ui.Rect dst, ui.Paint paint) {
    assert(rectIsValid(center));
    assert(rectIsValid(dst));
    if (dst.isEmpty) {
      return;
    }
    // Compute independent slice lists for each axis, then paint the cross
    // product of horizontal and vertical slices as individual image rects.
    final List<double> hSlices = _initSlices(
      0,
      center.left,
      center.right,
      image.width.toDouble(),
      dst.left,
      dst.right,
    );
    final List<double> vSlices = _initSlices(
      0,
      center.top,
      center.bottom,
      image.height.toDouble(),
      dst.top,
      dst.bottom,
    );
    for (int yi = 0; yi < vSlices.length; yi += 4) {
      final double srcY0 = vSlices[yi];
      final double dstY0 = vSlices[yi + 1];
      final double srcY1 = vSlices[yi + 2];
      final double dstY1 = vSlices[yi + 3];
      for (int xi = 0; xi < hSlices.length; xi += 4) {
        final double srcX0 = hSlices[xi];
        final double dstX0 = hSlices[xi + 1];
        final double srcX1 = hSlices[xi + 2];
        final double dstX1 = hSlices[xi + 3];
        drawImageRect(
          image,
          ui.Rect.fromLTRB(srcX0, srcY0, srcX1, srcY1),
          ui.Rect.fromLTRB(dstX0, dstY0, dstX1, dstY1),
          paint,
        );
      }
    }
  }

  @override
  void drawPicture(ui.Picture picture) {
    _canvas.drawPicture(picture);
  }

  @override
  void drawParagraph(ui.Paragraph paragraph, ui.Offset offset) {
    assert(offsetIsValid(offset));
    _drawParagraph(paragraph, offset);
  }

  void _drawParagraph(ui.Paragraph paragraph, ui.Offset offset) {
    _canvas.drawParagraph(paragraph, offset);
  }

  @override
  void drawPoints(
      ui.PointMode pointMode, List<ui.Offset> points, ui.Paint paint) {
    // Flatten the offsets into an (x, y) interleaved float list.
    final Float32List pointList = offsetListToFloat32List(points);
    drawRawPoints(pointMode, pointList, paint);
  }

  @override
  void drawRawPoints(
      ui.PointMode pointMode, Float32List points, ui.Paint paint) {
    if (points.length % 2 != 0) {
      throw ArgumentError('"points" must have an even number of values.');
    }
    _canvas.drawRawPoints(pointMode, points, paint as SurfacePaint);
  }

  @override
  void drawVertices(
      ui.Vertices vertices, ui.BlendMode blendMode, ui.Paint paint) {
    _canvas.drawVertices(
        vertices as SurfaceVertices, blendMode, paint as SurfacePaint);
  }

  // Not supported by the HTML renderer; arguments are validated before
  // throwing so callers get consistent errors.
  @override
  void drawAtlas(
    ui.Image atlas,
    List<ui.RSTransform> transforms,
    List<ui.Rect> rects,
    List<ui.Color>? colors,
    ui.BlendMode? blendMode,
    ui.Rect? cullRect,
    ui.Paint paint,
  ) {
    assert(colors == null || colors.isEmpty || blendMode != null);
    final int rectCount = rects.length;
    if (transforms.length != rectCount) {
      throw ArgumentError('"transforms" and "rects" lengths must match.');
    }
    if (colors != null && colors.isNotEmpty && colors.length != rectCount) {
      throw ArgumentError(
          'If non-null, "colors" length must match that of "transforms" and "rects".');
    }
    // TODO(het): Do we need to support this?
    throw UnimplementedError();
  }

  // Not supported by the HTML renderer; arguments are validated before
  // throwing so callers get consistent errors. Note that `rects` here holds
  // 4 floats per rectangle.
  @override
  void drawRawAtlas(
    ui.Image atlas,
    Float32List rstTransforms,
    Float32List rects,
    Int32List? colors,
    ui.BlendMode? blendMode,
    ui.Rect? cullRect,
    ui.Paint paint,
  ) {
    assert(colors == null || blendMode != null);
    final int rectCount = rects.length;
    if (rstTransforms.length != rectCount) {
      throw ArgumentError('"rstTransforms" and "rects" lengths must match.');
    }
    if (rectCount % 4 != 0) {
      throw ArgumentError(
          '"rstTransforms" and "rects" lengths must be a multiple of four.');
    }
    if (colors != null && colors.length * 4 != rectCount) {
      throw ArgumentError(
          'If non-null, "colors" length must be one fourth the length of "rstTransforms" and "rects".');
    }
    // TODO(het): Do we need to support this?
    throw UnimplementedError();
  }

  @override
  void drawShadow(
    ui.Path path,
    ui.Color color,
    double elevation,
    bool transparentOccluder,
  ) {
    _canvas.drawShadow(path, color, elevation, transparentOccluder);
  }
}
| engine/lib/web_ui/lib/src/engine/html/canvas.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/canvas.dart",
"repo_id": "engine",
"token_count": 5501
} | 287 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:ui/ui.dart' as ui;
import 'path_ref.dart';
/// Mask used to keep track of types of verbs used in a path segment.
///
/// Each bit records that at least one segment of the corresponding kind is
/// present in the path.
class SPathSegmentMask {
  static const int kLine_SkPathSegmentMask = 1 << 0;
  static const int kQuad_SkPathSegmentMask = 1 << 1;
  static const int kConic_SkPathSegmentMask = 1 << 2;
  static const int kCubic_SkPathSegmentMask = 1 << 3;
}
/// Types of path operations.
///
/// The trailing comment on each verb indicates how many points (and weights)
/// it consumes from the path's point storage.
class SPathVerb {
  static const int kMove = 0; // 1 point
  static const int kLine = 1; // 2 points
  static const int kQuad = 2; // 3 points
  static const int kConic = 3; // 3 points + 1 weight
  static const int kCubic = 4; // 4 points
  static const int kClose = 5; // 0 points
}
/// Shared path constants and small scalar helpers, mirroring Skia's SkPath.
abstract final class SPath {
  static const int kMoveVerb = SPathVerb.kMove;
  static const int kLineVerb = SPathVerb.kLine;
  static const int kQuadVerb = SPathVerb.kQuad;
  static const int kConicVerb = SPathVerb.kConic;
  static const int kCubicVerb = SPathVerb.kCubic;
  static const int kCloseVerb = SPathVerb.kClose;
  static const int kDoneVerb = SPathVerb.kClose + 1;

  static const int kLineSegmentMask = SPathSegmentMask.kLine_SkPathSegmentMask;
  static const int kQuadSegmentMask = SPathSegmentMask.kQuad_SkPathSegmentMask;
  static const int kConicSegmentMask =
      SPathSegmentMask.kConic_SkPathSegmentMask;
  static const int kCubicSegmentMask =
      SPathSegmentMask.kCubic_SkPathSegmentMask;

  static const double scalarNearlyZero = 1.0 / (1 << 12);

  /// Square root of 2 divided by 2. Useful for sin45 = cos45 = 1/sqrt(2).
  static const double scalarRoot2Over2 = 0.707106781;

  /// True if (a <= b <= c) || (a >= b >= c)
  static bool between(double a, double b, double c) {
    // b lies between a and c exactly when the two differences have
    // opposite signs (or either is zero).
    final double product = (a - b) * (c - b);
    return product <= 0;
  }

  /// Returns -1 || 0 || 1 depending on the sign of value:
  /// -1 if x < 0
  ///  0 if x == 0
  ///  1 if x > 0
  static int scalarSignedAsInt(double x) {
    if (x < 0) {
      return -1;
    }
    if (x > 0) {
      return 1;
    }
    return 0;
  }

  /// Whether two scalars differ by less than [scalarNearlyZero].
  static bool nearlyEqual(double value1, double value2) {
    return (value1 - value2).abs() < SPath.scalarNearlyZero;
  }

  /// Snaps a value to zero if almost zero (within tolerance).
  static double snapToZero(double value) {
    return SPath.nearlyEqual(value, 0.0) ? 0.0 : value;
  }

  /// Whether [value] has no fractional part.
  static bool isInteger(double value) => value.floor() == value;
}
/// Modes for appending one path to another.
class SPathAddPathMode {
  // Append to destination unaltered.
  static const int kAppend = 0;
  // Add line if prior contour is not closed.
  static const int kExtend = 1;
}
/// Winding direction of a closed contour.
class SPathDirection {
  /// Uninitialized value for empty paths.
  static const int kUnknown = -1;

  /// clockwise direction for adding closed contours.
  static const int kCW = 0;

  /// counter-clockwise direction for adding closed contours.
  static const int kCCW = 1;
}
/// Cached convexity classification of a path.
class SPathConvexityType {
  /// Convexity has not been computed (or could not be determined).
  static const int kUnknown = -1;
  static const int kConvex = 0;
  static const int kConcave = 1;
}
/// State of segment processing within a contour.
class SPathSegmentState {
  /// The current contour is empty. Starting processing or have just closed
  /// a contour.
  static const int kEmptyContour = 0;

  /// Have seen a move, but nothing else.
  static const int kAfterMove = 1;

  /// Have seen a primitive but not yet closed the path. Also the initial state.
  // NOTE(review): "Also the initial state" looks inconsistent with
  // kEmptyContour's description above — confirm which is the true initial
  // state against the iterator that uses these values.
  static const int kAfterPrimitive = 2;
}
/// Quadratic roots. See Numerical Recipes in C.
///
///    Q = -1/2 (B + sign(B) sqrt[B*B - 4*A*C])
///    x1 = Q / A
///    x2 = C / Q
class QuadRoots {
  QuadRoots();

  double? root0;
  double? root1;

  /// Returns roots as list.
  List<double> get roots => (root0 == null)
      ? <double>[]
      : (root1 == null ? <double>[root0!] : <double>[root0!, root1!]);

  /// Solves `a*t^2 + b*t + c = 0` for roots in the open unit interval.
  ///
  /// Returns the number of valid roots found (0, 1 or 2) and stores them in
  /// [root0]/[root1] in ascending order, so that [roots] always agrees with
  /// the returned count.
  int findRoots(double a, double b, double c) {
    // Clear state from any previous call so [roots] never reports stale
    // values.
    root0 = null;
    root1 = null;
    int rootCount = 0;
    if (a == 0) {
      // Degenerates to the linear equation b*t + c = 0.
      root0 = validUnitDivide(-c, b);
      return root0 == null ? 0 : 1;
    }
    double dr = b * b - 4 * a * c;
    if (dr < 0) {
      // Negative discriminant: complex roots only.
      return 0;
    }
    dr = math.sqrt(dr);
    if (!dr.isFinite) {
      return 0;
    }
    // Numerically stable form: pick the sign that avoids cancellation.
    final double q = (b < 0) ? -(b - dr) / 2 : -(b + dr) / 2;
    double? res = validUnitDivide(q, a);
    if (res != null) {
      root0 = res;
      ++rootCount;
    }
    res = validUnitDivide(c, q);
    if (res != null) {
      if (rootCount == 0) {
        root0 = res;
        ++rootCount;
      } else {
        root1 = res;
        ++rootCount;
      }
    }
    if (rootCount == 2) {
      if (root0! > root1!) {
        // Keep the roots sorted in ascending order.
        final double swap = root0!;
        root0 = root1;
        root1 = swap;
      } else if (root0 == root1) {
        // Double root: report it once and drop root1 so that [roots]
        // matches the returned count (previously root1 was left set,
        // making [roots] report two entries while returning 1).
        root1 = null;
        return 1;
      }
    }
    return rootCount;
  }
}
/// Divides [numer] by [denom], returning the quotient only when it lies
/// strictly inside the unit interval (0, 1); otherwise returns null.
double? validUnitDivide(double numer, double denom) {
  double n = numer;
  double d = denom;
  // Normalize so the numerator is non-negative; the quotient is unchanged.
  if (n < 0) {
    n = -n;
    d = -d;
  }
  if (d == 0 || n == 0 || n >= d) {
    return null;
  }
  final double r = n / d;
  if (r.isNaN) {
    return null;
  }
  if (r == 0) {
    // catch underflow if numer <<<< denom
    return null;
  }
  return r;
}
/// Whether [rrect] degenerates to an oval: the corner radii must span the
/// full width and height and match between the top and bottom corners.
bool isRRectOval(ui.RRect rrect) {
  final bool radiiSpanWidth =
      (rrect.tlRadiusX + rrect.trRadiusX) == rrect.width;
  final bool radiiSpanHeight =
      (rrect.tlRadiusY + rrect.trRadiusY) == rrect.height;
  if (!radiiSpanWidth || !radiiSpanHeight) {
    return false;
  }
  // Top corners must mirror the bottom corners exactly.
  return rrect.tlRadiusX == rrect.blRadiusX &&
      rrect.trRadiusX == rrect.brRadiusX &&
      rrect.tlRadiusY == rrect.blRadiusY &&
      rrect.trRadiusY == rrect.brRadiusY;
}
/// Evaluates the degree 2 polynomial A*t^2 + B*t + C via Horner's rule.
double polyEval(double A, double B, double C, double t) {
  return (A * t + B) * t + C;
}
/// Evaluates the degree 3 polynomial A*t^3 + B*t^2 + C*t + D via Horner's
/// rule.
double polyEval4(double A, double B, double C, double D, double t) {
  return ((A * t + B) * t + C) * t + D;
}
// Interpolate between two doubles (Not using lerpDouble here since it null
// checks and treats values as 0).
double interpolate(double startValue, double endValue, double t) {
  return startValue * (1 - t) + endValue * t;
}
/// Dot product of the 2D vectors (x0, y0) and (x1, y1).
double dotProduct(double x0, double y0, double x1, double y1) =>
    x0 * x1 + y0 * y1;
// Helper class for computing convexity for a single contour.
//
// Iteratively looks at angle (using cross product) between consecutive vectors
// formed by path.
class Convexicator {
  // Sentinel distinct from the 0/1 values produced in bySign's loop.
  static const int kValueNeverReturnedBySign = 2;

  // Second point of contour start that forms a vector.
  // Used to handle close operator to compute angle between last vector and
  // first.
  double? firstVectorEndPointX;
  double? firstVectorEndPointY;

  // Sliding window of the three most recent distinct points:
  // (priorX, priorY) -> (lastX, lastY) -> (currX, currY).
  double? priorX;
  double? priorY;
  double? lastX;
  double? lastY;
  double? currX;
  double? currY;

  // Last vector to use to compute angle.
  double? lastVecX;
  double? lastVecY;

  bool _isFinite = true;
  int _firstDirection = SPathDirection.kUnknown;
  int _reversals = 0;

  /// SPathDirection of contour.
  int get firstDirection => _firstDirection;

  // Direction expected for all subsequent turns once the first left/right
  // turn has been seen; kInvalid until then.
  DirChange _expectedDirection = DirChange.kInvalid;

  /// Resets the point window to the contour's starting point.
  void setMovePt(double x, double y) {
    currX = priorX = lastX = x;
    currY = priorY = lastY = y;
  }

  /// Feeds the next contour point; returns false once the contour is known
  /// to be concave (or non-finite).
  bool addPoint(double x, double y) {
    if (x == currX && y == currY) {
      // Skip zero length vector.
      return true;
    }
    currX = x;
    currY = y;
    final double vecX = currX! - lastX!;
    final double vecY = currY! - lastY!;
    if (priorX == lastX && priorY == lastY) {
      // First non-zero vector.
      lastVecX = vecX;
      lastVecY = vecY;
      firstVectorEndPointX = x;
      firstVectorEndPointY = y;
    } else if (!_addVector(vecX, vecY)) {
      return false;
    }
    priorX = lastX;
    priorY = lastY;
    lastX = x;
    lastY = y;
    return true;
  }

  /// Closes the contour by checking the turn formed with the first vector.
  bool close() {
    // Add another point from path closing point to end of first vector.
    return addPoint(firstVectorEndPointX!, firstVectorEndPointY!);
  }

  bool get isFinite => _isFinite;

  int get reversals => _reversals;

  // Classifies the turn from the last vector to (curVecX, curVecY).
  DirChange _directionChange(double curVecX, double curVecY) {
    // Cross product = ||lastVec|| * ||curVec|| * sin(theta) * N
    // sin(theta) angle between two vectors is positive for angles 0..180 and
    // negative for greater, providing left or right direction.
    final double lastX = lastVecX!;
    final double lastY = lastVecY!;
    final double cross = lastX * curVecY - lastY * curVecX;
    if (!cross.isFinite) {
      return DirChange.kUnknown;
    }
    // Detect straight and backwards direction change.
    // Instead of comparing absolute crossproduct size, compare
    // largest component double+crossproduct.
    final double smallest =
        math.min(curVecX, math.min(curVecY, math.min(lastX, lastY)));
    final double largest = math.max(
        math.max(curVecX, math.max(curVecY, math.max(lastX, lastY))),
        -smallest);
    if (SPath.nearlyEqual(largest, largest + cross)) {
      // Cross product is negligible relative to the vector magnitudes:
      // the vectors are (nearly) parallel.
      const double nearlyZeroSquared =
          SPath.scalarNearlyZero * SPath.scalarNearlyZero;
      if (SPath.nearlyEqual(lengthSquared(lastX, lastY), nearlyZeroSquared) ||
          SPath.nearlyEqual(lengthSquared(curVecX, curVecY), nearlyZeroSquared)) {
        // Length of either vector is smaller than tolerance to be able
        // to compute direction.
        return DirChange.kUnknown;
      }
      // The vectors are parallel, sign of dot product gives us direction.
      // cosine is positive for straight -90 < Theta < 90
      return dotProduct(lastX, lastY, curVecX, curVecY) < 0
          ? DirChange.kBackwards
          : DirChange.kStraight;
    }
    return cross > 0 ? DirChange.kRight : DirChange.kLeft;
  }

  // Accumulates a vector into the convexity state; returns false when the
  // contour is proven concave or non-finite.
  bool _addVector(double curVecX, double curVecY) {
    final DirChange dir = _directionChange(curVecX, curVecY);
    final bool isDirectionRight = dir == DirChange.kRight;
    if (dir == DirChange.kLeft || isDirectionRight) {
      if (_expectedDirection == DirChange.kInvalid) {
        // First valid direction. From this point on expect always left.
        _expectedDirection = dir;
        _firstDirection =
            isDirectionRight ? SPathDirection.kCW : SPathDirection.kCCW;
      } else if (dir != _expectedDirection) {
        // A turn in the opposite direction makes the contour concave.
        _firstDirection = SPathDirection.kUnknown;
        return false;
      }
      lastVecX = curVecX;
      lastVecY = curVecY;
    } else {
      switch (dir) {
        case DirChange.kBackwards:
          // Allow path to reverse direction twice.
          //  Given path.moveTo(0,0) lineTo(1,1)
          //  - First reversal: direction change formed by line (0,0 1,1),
          //    line (1,1 0,0)
          //  - Second reversal: direction change formed by line (1,1 0,0),
          //    line (0,0 1,1)
          lastVecX = curVecX;
          lastVecY = curVecY;
          return ++_reversals < 3;
        case DirChange.kUnknown:
          return _isFinite = false;
        default:
          break;
      }
    }
    return true;
  }

  // Quick test to detect concave by looking at number of changes in direction
  // of vectors formed by path points (excluding control points).
  static int bySign(PathRef pathRef, int pointIndex, int numPoints) {
    final int lastPointIndex = pointIndex + numPoints;
    int currentPoint = pointIndex++;
    final int firstPointIndex = currentPoint;
    int signChangeCountX = 0;
    int signChangeCountY = 0;
    int lastSx = kValueNeverReturnedBySign;
    int lastSy = kValueNeverReturnedBySign;
    // Two passes: the second pass wraps around to close the contour.
    for (int outerLoop = 0; outerLoop < 2; ++outerLoop) {
      while (pointIndex != lastPointIndex) {
        final double vecX = pathRef.pointXAt(pointIndex) -
            pathRef.pointXAt(currentPoint);
        final double vecY = pathRef.pointYAt(pointIndex) -
            pathRef.pointYAt(currentPoint);
        if (!(vecX == 0 && vecY == 0)) {
          // Give up if vector construction failed (non-finite components).
          if (!(vecX.isFinite && vecY.isFinite)) {
            return SPathConvexityType.kUnknown;
          }
          final int sx = vecX < 0 ? 1 : 0;
          final int sy = vecY < 0 ? 1 : 0;
          signChangeCountX += (sx != lastSx) ? 1 : 0;
          signChangeCountY += (sy != lastSy) ? 1 : 0;
          // A convex contour's per-axis direction can flip at most a fixed
          // number of times; more flips prove concavity.
          if (signChangeCountX > 3 || signChangeCountY > 3) {
            return SPathConvexityType.kConcave;
          }
          lastSx = sx;
          lastSy = sy;
        }
        currentPoint = pointIndex++;
        if (outerLoop != 0) {
          break;
        }
      }
      pointIndex = firstPointIndex;
    }
    return SPathConvexityType.kConvex;
  }
}
/// Classification of the turn between two consecutive contour vectors.
enum DirChange {
  kUnknown,
  kLeft,
  kRight,
  kStraight,
  kBackwards, // if double back, allow simple lines to be convex
  kInvalid
}
/// Squared length of [offset], avoiding the square root of [ui.Offset.distance].
double lengthSquaredOffset(ui.Offset offset) =>
    offset.dx * offset.dx + offset.dy * offset.dy;
double lengthSquared(double dx, double dy) => dx * dx + dy * dy;
/// Evaluates A * t^2 + B * t + C = 0 for quadratic curve.
///
/// Converts a quadratic Bezier's control points (x0,y0), (x1,y1), (x2,y2)
/// into power-basis polynomial coefficients for each axis, so points on the
/// curve can be evaluated directly at parameter t.
class SkQuadCoefficients {
  SkQuadCoefficients(
      double x0, double y0, double x1, double y1, double x2, double y2)
      : cx = x0,
        cy = y0,
        bx = 2 * (x1 - x0),
        by = 2 * (y1 - y0),
        ax = x2 - (2 * x1) + x0,
        ay = y2 - (2 * y1) + y0;

  final double ax, ay, bx, by, cx, cy;

  // Horner-rule evaluation of the x and y polynomials at parameter t.
  double evalX(double t) => (ax * t + bx) * t + cx;
  double evalY(double t) => (ay * t + by) * t + cy;
}
| engine/lib/web_ui/lib/src/engine/html/path/path_utils.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/path/path_utils.dart",
"repo_id": "engine",
"token_count": 5325
} | 288 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:ui/ui.dart' as ui;
import '../../browser_detection.dart';
/// Creates shader program for target webgl version.
///
/// See spec at https://www.khronos.org/registry/webgl/specs/latest/1.0/.
///
/// Differences in WebGL2 vs WebGL1.
/// - WebGL2 needs '#version 300 es' to enable the new shading language
/// - vertex attributes have the qualifier 'in' instead of 'attribute'
/// - GLSL 3.00 defines texture and other new and future reserved words.
/// - varying is now called `in`.
/// - GLSL 1.00 has a predefined variable gl_FragColor which now needs to be
/// defined as `out vec4 fragmentColor`.
/// - Texture lookup functions texture2D and textureCube have now been
/// replaced with texture.
///
/// Example usage:
/// ShaderBuilder builder = ShaderBuilder(WebGlVersion.webgl2);
/// ShaderDeclaration u1 = builder.addUniform(ShaderType.kVec4);
/// ShaderMethod method = builder.addMethod('main');
/// method.addStatement('${u1.name} = vec4(1.0, 1.0, 1.0, 0.0);');
/// source = builder.build();
class ShaderBuilder {
  ShaderBuilder(this.version)
      : isWebGl2 = version == WebGLVersion.webgl2,
        _isFragmentShader = false;

  ShaderBuilder.fragment(this.version)
      : isWebGl2 = version == WebGLVersion.webgl2,
        _isFragmentShader = true;

  /// WebGL version.
  final int version;

  /// All variable declarations (attributes, uniforms, varyings, constants)
  /// emitted at the top of the generated shader source.
  final List<ShaderDeclaration> declarations = <ShaderDeclaration>[];
  final List<ShaderMethod> _methods = <ShaderMethod>[];

  /// Precision for integer variables.
  int? integerPrecision;

  /// Precision floating point variables.
  int? floatPrecision;

  /// Counter for generating unique name if name is not specified for attribute.
  int _attribCounter = 0;

  /// Counter for generating unique name if name is not specified for varying.
  int _varyingCounter = 0;

  /// Counter for generating unique name if name is not specified for uniform.
  int _uniformCounter = 0;

  /// Counter for generating unique name if name is not specified for constant.
  int _constCounter = 0;

  final bool isWebGl2;
  final bool _isFragmentShader;

  static const String kOpenGlEs3Header = '#version 300 es';

  /// Lazily allocated fragment color output.
  ShaderDeclaration? _fragmentColorDeclaration;

  /// Returns fragment color declaration for fragment shader.
  ///
  /// This is hard coded for webgl1 as gl_FragColor.
  ShaderDeclaration get fragmentColor {
    _fragmentColorDeclaration ??= ShaderDeclaration(
        isWebGl2 ? 'gFragColor' : 'gl_FragColor',
        ShaderType.kVec4,
        ShaderStorageQualifier.kVarying);
    return _fragmentColorDeclaration!;
  }

  /// Adds an attribute.
  ///
  /// The attribute variable is assigned a value from a object buffer as a
  /// series of graphics primitives are rendered. The value is only accessible
  /// in the vertex shader.
  ShaderDeclaration addIn(int dataType, {String? name}) {
    final ShaderDeclaration attrib = ShaderDeclaration(
        name ?? 'attr_${_attribCounter++}',
        dataType,
        ShaderStorageQualifier.kAttribute);
    declarations.add(attrib);
    return attrib;
  }

  /// Adds a constant.
  ShaderDeclaration addConst(int dataType, String value, {String? name}) {
    final ShaderDeclaration declaration = ShaderDeclaration.constant(
        name ?? 'c_${_constCounter++}', dataType, value);
    declarations.add(declaration);
    return declaration;
  }

  /// Adds a uniform variable.
  ///
  /// The variable is assigned a value before a gl.draw call.
  /// It is accessible in both the vertex and fragment shaders.
  ShaderDeclaration addUniform(int dataType, {String? name}) {
    final ShaderDeclaration uniform = ShaderDeclaration(
        name ?? 'uni_${_uniformCounter++}',
        dataType,
        ShaderStorageQualifier.kUniform);
    declarations.add(uniform);
    return uniform;
  }

  /// Adds a varying variable.
  ///
  /// The variable is assigned a value by a vertex shader and
  /// interpolated across the surface of a graphics primitive for each
  /// input to a fragment shader.
  /// It can be used in a fragment shader, but not changed.
  ShaderDeclaration addOut(int dataType, {String? name}) {
    final ShaderDeclaration varying = ShaderDeclaration(
        name ?? 'output_${_varyingCounter++}',
        dataType,
        ShaderStorageQualifier.kVarying);
    declarations.add(varying);
    return varying;
  }

  /// Writes the declaration of [variable] into [sb].
  ///
  /// The storage qualifier keyword depends on the WebGL version and, for
  /// attributes, on whether this is a vertex or fragment shader.
  //
  // NOTE: this previously ignored the [sb] parameter and wrote directly to
  // [_buffer]; it now honors the parameter. All existing call sites pass
  // [_buffer], so behavior is unchanged.
  void _writeVariableDeclaration(StringBuffer sb, ShaderDeclaration variable) {
    switch (variable.storage) {
      case ShaderStorageQualifier.kConst:
        sb.write('const ');
      case ShaderStorageQualifier.kAttribute:
        sb.write(isWebGl2
            ? 'in '
            : _isFragmentShader ? 'varying ' : 'attribute ');
      case ShaderStorageQualifier.kUniform:
        sb.write('uniform ');
      case ShaderStorageQualifier.kVarying:
        sb.write(isWebGl2 ? 'out ' : 'varying ');
    }
    sb.write('${typeToString(variable.dataType)} ${variable.name}');
    if (variable.storage == ShaderStorageQualifier.kConst) {
      sb.write(' = ${variable.constValue}');
    }
    sb.writeln(';');
  }

  final StringBuffer _buffer = StringBuffer();

  /// Maps a [ShaderType] constant to its GLSL source name.
  ///
  /// Throws [ArgumentError] for unknown values.
  static String typeToString(int dataType) {
    switch (dataType) {
      case ShaderType.kBool:
        return 'bool';
      case ShaderType.kInt:
        return 'int';
      case ShaderType.kFloat:
        return 'float';
      case ShaderType.kBVec2:
        return 'bvec2';
      case ShaderType.kBVec3:
        return 'bvec3';
      case ShaderType.kBVec4:
        return 'bvec4';
      case ShaderType.kIVec2:
        return 'ivec2';
      case ShaderType.kIVec3:
        return 'ivec3';
      case ShaderType.kIVec4:
        return 'ivec4';
      case ShaderType.kVec2:
        return 'vec2';
      case ShaderType.kVec3:
        return 'vec3';
      case ShaderType.kVec4:
        return 'vec4';
      case ShaderType.kMat2:
        return 'mat2';
      case ShaderType.kMat3:
        return 'mat3';
      case ShaderType.kMat4:
        return 'mat4';
      case ShaderType.kSampler1D:
        return 'sampler1D';
      case ShaderType.kSampler2D:
        return 'sampler2D';
      case ShaderType.kSampler3D:
        return 'sampler3D';
      case ShaderType.kVoid:
        return 'void';
    }
    throw ArgumentError();
  }

  /// Adds a new method with the given [name] to the shader and returns it so
  /// that statements can be appended.
  ShaderMethod addMethod(String name) {
    final ShaderMethod method = ShaderMethod(name);
    _methods.add(method);
    return method;
  }

  /// Assembles and returns the complete shader source: header, precision
  /// statements, declarations, then methods.
  String build() {
    // Write header.
    if (isWebGl2) {
      _buffer.writeln(kOpenGlEs3Header);
    }
    // Write optional precision.
    if (integerPrecision != null) {
      _buffer
          .writeln('precision ${_precisionToString(integerPrecision!)} int;');
    }
    if (floatPrecision != null) {
      _buffer
          .writeln('precision ${_precisionToString(floatPrecision!)} float;');
    }
    // WebGL2 requires the fragment color output to be declared explicitly.
    if (isWebGl2 && _fragmentColorDeclaration != null) {
      _writeVariableDeclaration(_buffer, _fragmentColorDeclaration!);
    }
    for (final ShaderDeclaration decl in declarations) {
      _writeVariableDeclaration(_buffer, decl);
    }
    for (final ShaderMethod method in _methods) {
      method.write(_buffer);
    }
    return _buffer.toString();
  }

  /// Maps a [ShaderPrecision] constant to its GLSL qualifier.
  String _precisionToString(int precision) => precision == ShaderPrecision.kLow
      ? 'lowp'
      : precision == ShaderPrecision.kMedium ? 'mediump' : 'highp';

  /// Name of the texture lookup function for the target GLSL version.
  String get texture2DFunction => isWebGl2 ? 'texture' : 'texture2D';
}
/// A function in the generated shader source.
///
/// Statements are appended via [addStatement] and written out wrapped in a
/// `void name() { ... }` body by [write].
class ShaderMethod {
  ShaderMethod(this.name);

  /// Generated shader methods always return void.
  final String returnType = 'void';

  /// Name of the GLSL function.
  final String name;

  final List<String> _statements = <String>[];
  int _indentLevel = 1;

  /// Increases the indentation applied to subsequently added statements.
  void indent() {
    ++_indentLevel;
  }

  /// Decreases the indentation applied to subsequently added statements.
  void unindent() {
    assert(_indentLevel != 1);
    --_indentLevel;
  }

  /// Appends a single statement to the method body.
  ///
  /// In debug mode the statement is prefixed with whitespace matching the
  /// current indentation level, to keep generated source readable. In release
  /// mode the statement is stored as-is.
  void addStatement(String statement) {
    String line = statement;
    assert(() {
      line = ' ' * _indentLevel + statement;
      return true;
    }());
    _statements.add(line);
  }

  /// Adds statements that map [source] to a tiled coordinate [destination]
  /// in 0..1 space according to [tileMode].
  ///
  /// - clamp/decal: the value is passed through unchanged.
  /// - repeated: the fractional part of the value is used.
  /// - mirror: the value (shifted by -1) is reflected so the pattern repeats
  ///   every 2 units, by measuring the distance from the nearest even
  ///   multiple.
  void addTileStatements(
      String source, String destination, ui.TileMode tileMode) {
    switch (tileMode) {
      case ui.TileMode.repeated:
        addStatement('float $destination = fract($source);');
      case ui.TileMode.mirror:
        addStatement('float $destination = ($source - 1.0);');
        addStatement(
            '$destination = '
            'abs(($destination - 2.0 * floor($destination * 0.5)) - 1.0);');
      case ui.TileMode.clamp:
      case ui.TileMode.decal:
        addStatement('float $destination = $source;');
    }
  }

  /// Writes the complete GLSL function into [buffer].
  void write(StringBuffer buffer) {
    buffer.writeln('$returnType $name() {');
    for (final String statement in _statements) {
      buffer.writeln(statement);
    }
    buffer.writeln('}');
  }
}
/// WebGl Shader data types.
///
/// These integer constants identify GLSL data types when declaring shader
/// variables through [ShaderBuilder]; they are mapped to GLSL source names
/// by [ShaderBuilder.typeToString].
abstract class ShaderType {
  // Basic types.
  static const int kBool = 0;
  static const int kInt = 1;
  static const int kFloat = 2;
  // Vector types.
  static const int kBVec2 = 3;
  static const int kBVec3 = 4;
  static const int kBVec4 = 5;
  static const int kIVec2 = 6;
  static const int kIVec3 = 7;
  static const int kIVec4 = 8;
  static const int kVec2 = 9;
  static const int kVec3 = 10;
  static const int kVec4 = 11;
  // Matrix types.
  static const int kMat2 = 12;
  static const int kMat3 = 13;
  static const int kMat4 = 14;
  // Textures.
  static const int kSampler1D = 15;
  static const int kSampler2D = 16;
  static const int kSampler3D = 17;
  // Other.
  static const int kVoid = 18;
}
/// Precision of int and float types.
///
/// Maps to the GLSL `lowp`, `mediump` and `highp` precision qualifiers; see
/// [ShaderBuilder._precisionToString].
/// Integers: 8 bit, 10 bit and 16 bits.
/// Float: 8 bit, 14 bit and 62 bits.
abstract class ShaderPrecision {
  static const int kLow = 0;
  static const int kMedium = 1;
  static const int kHigh = 2;
}
/// GL Variable storage qualifiers.
///
/// Determines the keyword emitted in front of a declaration by
/// [ShaderBuilder], which differs between WebGL 1 and WebGL 2.
abstract class ShaderStorageQualifier {
  /// Compile-time constant; emitted as `const`.
  static const int kConst = 0;

  /// Per-vertex input; emitted as `in` (WebGL 2), or `attribute`/`varying`
  /// (WebGL 1 vertex/fragment shader respectively).
  static const int kAttribute = 1;

  /// Value assigned before a draw call; emitted as `uniform`.
  static const int kUniform = 2;

  /// Interpolated vertex-to-fragment value; emitted as `out` (WebGL 2) or
  /// `varying` (WebGL 1).
  static const int kVarying = 3;
}
/// Shader variable and constant declaration.
class ShaderDeclaration {
  // Reserved GLSL words are rejected in debug mode only.
  ShaderDeclaration(this.name, this.dataType, this.storage)
      : assert(!_isGLSLReservedWord(name)),
        constValue = '';

  /// Constructs a constant.
  ShaderDeclaration.constant(this.name, this.dataType, this.constValue)
      : storage = ShaderStorageQualifier.kConst;

  /// Variable name as it appears in the generated GLSL source.
  final String name;

  /// One of the [ShaderType] constants.
  final int dataType;

  /// One of the [ShaderStorageQualifier] constants.
  final int storage;

  /// Source text of the constant's value; empty for non-constants.
  final String constValue;
}
// These are used only in debug mode to assert if used as variable name.
// https://www.khronos.org/registry/OpenGL/specs/gl/GLSLangSpec.4.10.pdf
//
// Stored as a const Set (rather than a List) so that the debug-mode
// reserved-word check in [ShaderDeclaration] is a constant-time hash lookup
// instead of a linear scan over ~200 entries.
const Set<String> _kReservedWords = <String>{
  'attribute',
  'const',
  'uniform',
  'varying',
  'layout',
  'centroid',
  'flat',
  'smooth',
  'noperspective',
  'patch', 'sample',
  'break', 'continue',
  'do', 'for', 'while', 'switch', 'case', 'default', 'if', 'else',
  'subroutine',
  'in', 'out', 'inout', 'float', 'double', 'int',
  'void',
  'bool', 'true', 'false',
  'invariant',
  'discard', 'return',
  'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', 'dmat4',
  'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4',
  'mat3x2', 'mat3x3', 'mat3x4', 'dmat3x2', 'dmat3x3', 'dmat3x4',
  'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', 'dmat4x4',
  'vec2', 'vec3', 'vec4', 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4',
  'dvec2', 'dvec3', 'dvec4',
  'uint', 'uvec2', 'uvec3', 'uvec4',
  'lowp', 'mediump', 'highp', 'precision',
  'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube',
  'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow',
  'sampler1DArray', 'sampler2DArray',
  'sampler1DArrayShadow', 'sampler2DArrayShadow',
  'isampler1D', 'isampler2D', 'isampler3D', 'isamplerCube',
  'isampler1DArray', 'isampler2DArray',
  'usampler1D', 'usampler2D', 'usampler3D', 'usamplerCube',
  'usampler1DArray', 'usampler2DArray',
  'sampler2DRect', 'sampler2DRectShadow', 'isampler2DRect', 'usampler2DRect',
  'samplerBuffer', 'isamplerBuffer', 'usamplerBuffer',
  'sampler2DMS', 'isampler2DMS', 'usampler2DMS',
  'sampler2DMSArray', 'isampler2DMSArray', 'usampler2DMSArray',
  'samplerCubeArray', 'samplerCubeArrayShadow', 'isamplerCubeArray',
  'usamplerCubeArray',
  'struct',
  'texture',
  // Reserved for future use, see
  // https://www.khronos.org/registry/OpenGL/specs/gl/GLSLangSpec.4.10.pdf
  'active', 'asm', 'cast', 'class', 'common', 'enum', 'extern', 'external',
  'filter', 'fixed', 'fvec2', 'fvec3', 'fvec4', 'goto', 'half', 'hvec2',
  'hvec3', 'hvec4', 'iimage1D', 'iimage1DArray', 'iimage2D', 'iimage2DArray',
  'iimage3D', 'iimageBuffer', 'iimageCube', 'image1D', 'image1DArray',
  'image1DArrayShadow', 'image1DShadow', 'image2D', 'image2DArray',
  'image2DArrayShadow', 'image2DShadow', 'image3D', 'imageBuffer',
  'imageCube', 'inline', 'input', 'interface', 'long',
  'namespace', 'noinline', 'output', 'packed', 'partition', 'public',
  'row_major', 'sampler3DRect', 'short', 'sizeof', 'static', 'superp', 'template', 'this',
  'typedef', 'uimage1D', 'uimage1DArray', 'uimage2D', 'uimage2DArray',
  'uimage3D', 'uimageBuffer', 'uimageCube', 'union', 'unsigned',
  'using', 'volatile',
};

/// Whether [name] is a word reserved by GLSL and therefore cannot be used as
/// a variable name in generated shader source.
bool _isGLSLReservedWord(String name) {
  return _kReservedWords.contains(name);
}
| engine/lib/web_ui/lib/src/engine/html/shaders/shader_builder.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/shaders/shader_builder.dart",
"repo_id": "engine",
"token_count": 5171
} | 289 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../dom.dart';
/// The CSS `cursor` value used when no explicit cursor kind is set.
const String _kDefaultCursor = 'default';
/// Controls the mouse cursor in the given [element].
class MouseCursor {
  MouseCursor(this.element);

  final DomElement element;

  // Map from Flutter's kind values to CSS's cursor values.
  //
  // This map must be kept in sync with Flutter framework's
  // rendering/mouse_cursor.dart.
  static const Map<String, String> _kindToCssValueMap = <String, String>{
    'alias': 'alias',
    'allScroll': 'all-scroll',
    'basic': _kDefaultCursor,
    'cell': 'cell',
    'click': 'pointer',
    'contextMenu': 'context-menu',
    'copy': 'copy',
    'forbidden': 'not-allowed',
    'grab': 'grab',
    'grabbing': 'grabbing',
    'help': 'help',
    'move': 'move',
    'none': 'none',
    'noDrop': 'no-drop',
    'precise': 'crosshair',
    'progress': 'progress',
    'text': 'text',
    'resizeColumn': 'col-resize',
    'resizeDown': 's-resize',
    'resizeDownLeft': 'sw-resize',
    'resizeDownRight': 'se-resize',
    'resizeLeft': 'w-resize',
    'resizeLeftRight': 'ew-resize',
    'resizeRight': 'e-resize',
    'resizeRow': 'row-resize',
    'resizeUp': 'n-resize',
    'resizeUpDown': 'ns-resize',
    'resizeUpLeft': 'nw-resize',
    'resizeUpRight': 'ne-resize',
    'resizeUpLeftDownRight': 'nwse-resize',
    'resizeUpRightDownLeft': 'nesw-resize',
    'verticalText': 'vertical-text',
    'wait': 'wait',
    'zoomIn': 'zoom-in',
    'zoomOut': 'zoom-out',
  };

  /// Resolves a Flutter cursor [kind] to its CSS `cursor` value, falling
  /// back to [_kDefaultCursor] for unknown or null kinds.
  static String _mapKindToCssValue(String? kind) =>
      _kindToCssValueMap[kind] ?? _kDefaultCursor;

  /// Activates the system cursor identified by [kind].
  ///
  /// The default cursor is expressed by removing the property entirely
  /// rather than setting it explicitly.
  void activateSystemCursor(String? kind) {
    final String cssValue = _mapKindToCssValue(kind);
    // TODO(mdebbar): This should be set on the element, not the body. In order
    // to do that, we need the framework to send us the view ID.
    // https://github.com/flutter/flutter/issues/140226
    final bodyStyle = domDocument.body!.style;
    if (cssValue == _kDefaultCursor) {
      bodyStyle.removeProperty('cursor');
    } else {
      bodyStyle.cursor = cssValue;
    }
  }
}
| engine/lib/web_ui/lib/src/engine/mouse/cursor.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/mouse/cursor.dart",
"repo_id": "engine",
"token_count": 886
} | 290 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui show Offset;
import '../dom.dart';
import '../semantics.dart' show EngineSemantics;
import '../text_editing/text_editing.dart';
import '../vector_math.dart';
import '../window.dart';
/// Returns an [ui.Offset] of the position of [event], relative to the
/// position of the view's root element.
///
/// The offset is *not* multiplied by DPR or anything else, it's the closest
/// to what the DOM would return if we had currentTarget readily available.
///
/// The root element is used instead of `event.currentTarget` (which is what
/// this would really need to use) because `currentTarget` gets lost when the
/// `event` comes from a "coalesced" event.
///
/// It also takes into account semantics being enabled to fix the case where
/// offsetX, offsetY == 0 (TalkBack events).
ui.Offset computeEventOffsetToTarget(
    DomMouseEvent event, EngineFlutterView view) {
  final DomElement rootElement = view.dom.rootElement;

  // TalkBack events report zero offsets; recompute from client coordinates.
  if (EngineSemantics.instance.semanticsEnabled &&
      event.offsetX == 0 &&
      event.offsetY == 0) {
    return _computeOffsetForTalkbackEvent(event, rootElement);
  }

  // Events on one of our text-editing nodes must go through the input's
  // transform, when its geometry is known.
  if (view.dom.textEditingHost.contains(event.target! as DomNode)) {
    final EditableTextGeometry? inputGeometry = textEditing.strategy.geometry;
    if (inputGeometry != null) {
      return _computeOffsetForInputs(event, inputGeometry);
    }
  }

  // Events on another DOM element (normally a platform view) are positioned
  // against the root element's bounding rect.
  if (event.target != rootElement) {
    final DomRect origin = rootElement.getBoundingClientRect();
    // event.clientX/Y and origin.x/y are relative **to the viewport**.
    // (This doesn't work with 3D translations of the parent element.)
    // TODO(dit): Make this understand 3D transforms, https://github.com/flutter/flutter/issues/117091
    return ui.Offset(event.clientX - origin.x, event.clientY - origin.y);
  }

  // Return the offsetX/Y in the normal case.
  // (This works with 3D translations of the parent element.)
  return ui.Offset(event.offsetX, event.offsetY);
}
/// Computes the offsets for input nodes, which live outside of the shadowDOM.
///
/// Since inputs can be transformed (scaled, translated, etc), simple
/// offset-relative math is not sufficient: it only handles translated inputs
/// and produces wrong coordinates for scaled ones
/// (see: https://github.com/flutter/flutter/issues/125948).
///
/// Instead, the `event.offset` point is transformed by the input's global
/// transform matrix, taken from the text input geometry data that the
/// framework sends for the underlying input element.
ui.Offset _computeOffsetForInputs(
    DomMouseEvent event, EditableTextGeometry inputGeometry) {
  final DomElement eventTarget = event.target! as DomHTMLElement;
  final DomHTMLElement activeElement = textEditing.strategy.activeDomElement;
  assert(eventTarget == activeElement,
      'The targeted input element must be the active input element');

  final Float32List transformValues = inputGeometry.globalTransform;
  assert(transformValues.length == 16);
  final Matrix4 transform = Matrix4.fromFloat32List(transformValues);
  final Vector3 transformedPoint =
      transform.perspectiveTransform(x: event.offsetX, y: event.offsetY, z: 0);
  return ui.Offset(transformedPoint.x, transformedPoint.y);
}
/// Computes the event offset when TalkBack is firing the event.
///
/// In this case, we need to use the clientX/Y position of the event (which are
/// relative to the absolute top-left corner of the page, including scroll),
/// then deduct the offsetLeft/Top from every offsetParent of `actualTarget`.
///
///  ×-Page----║-------------------------------+
///  |         ║                               |
///  | ×-------║--------offsetParent(s)-----+  |
///  | |\      ║                            |  |
///  | | offsetLeft, offsetTop              |  |
///  | |       ║                            |  |
///  | |       ║                            |  |
///  | | ×-----║-------------actualTarget-+ |  |
///  | | |     ║                          | |  |
///  ═════ × ─ (scrollLeft, scrollTop)═ ═ ═    |
///  | | |                                | |  |
///  | | |     ×                          | |  |
///  | | |      \                         | |  |
///  | | |       clientX, clientY         | |  |
///  | | |       (Relative to Page + Scroll) | |
///  | | +-----║--------------------------+ |  |
///  | +-------║----------------------------+  |
///  +---------║-------------------------------+
///
/// Computing the offset of the event relative to the actualTarget requires
/// the clientX, clientY of the actualTarget itself. To get that, we walk up
/// the offsetParent chain of actualTarget, accumulating each ancestor's
/// offset and scroll positions, and deduct the result from the event's
/// clientX, clientY.
// TODO(dit): Make this understand 3D transforms, https://github.com/flutter/flutter/issues/117091
ui.Offset _computeOffsetForTalkbackEvent(
    DomMouseEvent event, DomElement actualTarget) {
  assert(EngineSemantics.instance.semanticsEnabled);
  // Start from the client coordinates (relative to the top left of the page,
  // including scroll) and subtract every ancestor's contribution.
  double x = event.clientX;
  double y = event.clientY;
  for (DomHTMLElement ancestor = actualTarget as DomHTMLElement;
      ancestor.offsetParent != null;
      ancestor = ancestor.offsetParent!) {
    x -= ancestor.offsetLeft - ancestor.scrollLeft;
    y -= ancestor.offsetTop - ancestor.scrollTop;
  }
  return ui.Offset(x, y);
}
| engine/lib/web_ui/lib/src/engine/pointer_binding/event_position_helper.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/pointer_binding/event_position_helper.dart",
"repo_id": "engine",
"token_count": 1968
} | 291 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:ui/ui.dart' as ui;
import '../dom.dart';
import '../platform_dispatcher.dart';
import 'focusable.dart';
import 'label_and_value.dart';
import 'semantics.dart';
/// Adds increment/decrement event handling to a semantics object.
///
/// The implementation uses a hidden `<input type="range">` element with ARIA
/// attributes to cause the browser to render increment/decrement controls to
/// the assistive technology.
///
/// The input element is disabled whenever the gesture mode switches to pointer
/// events. This is to prevent the browser from taking over drag gestures. Drag
/// gestures must be interpreted by the Flutter framework.
class Incrementable extends PrimaryRoleManager {
  Incrementable(SemanticsObject semanticsObject)
      : _focusManager = AccessibilityFocusManager(semanticsObject.owner),
        super.blank(PrimaryRole.incrementable, semanticsObject) {
    // The following generic roles can coexist with incrementables. Generic focus
    // management is not used by this role because the root DOM element is not
    // the one being focused on, but the internal `<input>` element.
    addLiveRegion();
    addRouteName();
    addLabelAndValue(labelRepresentation: LeafLabelRepresentation.ariaLabel);

    append(_element);
    _element.type = 'range';
    _element.setAttribute('role', 'slider');

    // Translate the browser's change events on the range input into Flutter
    // increase/decrease semantics actions by comparing the new input value
    // against the current surrogate value.
    _element.addEventListener('change', createDomEventListener((_) {
      if (_element.disabled!) {
        return;
      }
      _pendingResync = true;
      final int newInputValue = int.parse(_element.value!);
      if (newInputValue > _currentSurrogateValue) {
        _currentSurrogateValue += 1;
        EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
            semanticsObject.id, ui.SemanticsAction.increase, null);
      } else if (newInputValue < _currentSurrogateValue) {
        _currentSurrogateValue -= 1;
        EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
            semanticsObject.id, ui.SemanticsAction.decrease, null);
      }
    }));

    // Store the callback as a closure because Dart does not guarantee that
    // tear-offs produce the same function object.
    _gestureModeListener = (GestureMode mode) {
      update();
    };
    EngineSemantics.instance.addGestureModeListener(_gestureModeListener);
    _focusManager.manage(semanticsObject.id, _element);
  }

  /// Focuses the internal `<input>` element when this node is the default
  /// focus target of a route.
  @override
  bool focusAsRouteDefault() {
    _element.focus();
    return true;
  }

  /// The HTML element used to render semantics to the browser.
  final DomHTMLInputElement _element = createDomHTMLInputElement();

  // Manages focus of the internal <input> element rather than the root
  // semantic element.
  final AccessibilityFocusManager _focusManager;

  /// The value used by the input element.
  ///
  /// Flutter values are strings, and are not necessarily numbers. In order to
  /// convey to the browser what the available "range" of values is we
  /// substitute the framework value with a generated `int` surrogate.
  /// "aria-valuetext" attribute is used to cause the browser to announce the
  /// framework value to the user.
  int _currentSurrogateValue = 1;

  /// Disables the input [_element] when the gesture mode switches to
  /// [GestureMode.pointerEvents], and enables it when the mode switches back to
  /// [GestureMode.browserGestures].
  late final GestureModeCallback _gestureModeListener;

  /// Whether we forwarded a semantics action to the framework and awaiting an
  /// update.
  ///
  /// This field is used to determine whether the HTML DOM of the semantics
  /// tree should be updated.
  bool _pendingResync = false;

  /// Syncs the input element with the latest semantics state for the current
  /// gesture mode, then updates focus.
  @override
  void update() {
    super.update();

    switch (EngineSemantics.instance.gestureMode) {
      case GestureMode.browserGestures:
        _enableBrowserGestureHandling();
        _updateInputValues();
      case GestureMode.pointerEvents:
        _disableBrowserGestureHandling();
    }
    _focusManager.changeFocus(semanticsObject.hasFocus);
  }

  /// Re-enables the input element so the browser handles gestures again.
  void _enableBrowserGestureHandling() {
    assert(EngineSemantics.instance.gestureMode == GestureMode.browserGestures);
    if (!_element.disabled!) {
      return;
    }
    _element.disabled = false;
  }

  /// Pushes the surrogate value and its min/max/aria attributes to the input
  /// element, but only when a resync is pending or one of the framework
  /// values changed.
  void _updateInputValues() {
    assert(EngineSemantics.instance.gestureMode == GestureMode.browserGestures);

    final bool updateNeeded = _pendingResync ||
        semanticsObject.isValueDirty ||
        semanticsObject.isIncreasedValueDirty ||
        semanticsObject.isDecreasedValueDirty;
    if (!updateNeeded) {
      return;
    }

    _pendingResync = false;

    final String surrogateTextValue = '$_currentSurrogateValue';
    _element.value = surrogateTextValue;
    _element.setAttribute('aria-valuenow', surrogateTextValue);
    _element.setAttribute('aria-valuetext', semanticsObject.value!);

    // Only expose a larger max when an increase is actually possible.
    final bool canIncrease = semanticsObject.increasedValue!.isNotEmpty;
    final String surrogateMaxTextValue =
        canIncrease ? '${_currentSurrogateValue + 1}' : surrogateTextValue;
    _element.max = surrogateMaxTextValue;
    _element.setAttribute('aria-valuemax', surrogateMaxTextValue);

    // Only expose a smaller min when a decrease is actually possible.
    final bool canDecrease = semanticsObject.decreasedValue!.isNotEmpty;
    final String surrogateMinTextValue =
        canDecrease ? '${_currentSurrogateValue - 1}' : surrogateTextValue;
    _element.min = surrogateMinTextValue;
    _element.setAttribute('aria-valuemin', surrogateMinTextValue);
  }

  /// Disables the input element so drag gestures are interpreted by the
  /// Flutter framework instead of the browser.
  void _disableBrowserGestureHandling() {
    if (_element.disabled!) {
      return;
    }
    _element.disabled = true;
  }

  @override
  void dispose() {
    super.dispose();
    _focusManager.stopManaging();
    EngineSemantics.instance.removeGestureModeListener(_gestureModeListener);
    _disableBrowserGestureHandling();
    _element.remove();
  }
}
| engine/lib/web_ui/lib/src/engine/semantics/incrementable.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/semantics/incrementable.dart",
"repo_id": "engine",
"token_count": 1859
} | 292 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:meta/meta.dart';
import 'package:ui/ui.dart' as ui;
import 'dom.dart';
/// How far is the light source from the surface of the UI.
///
/// Originally based on the constant in `flow/layers/physical_shape_layer.cc`.
const double kLightHeight = 600.0;

/// The radius of the light source. The positive radius creates a penumbra in
/// the shadow, which we express using a blur effect.
///
/// Originally based on the constant in `flow/layers/physical_shape_layer.cc`.
const double kLightRadius = 800.0;

/// The X offset of the light source relative to the center of the shape.
///
/// This shifts the shadow along the X axis as if the light beams at an angle.
const double kLightOffsetX = -200.0;

/// The Y offset of the light source relative to the center of the shape.
///
/// This shifts the shadow along the Y axis as if the light beams at an angle.
const double kLightOffsetY = -400.0;
/// Computes the offset that moves the shadow due to the light hitting the
/// shape at an angle.
///
///        ------ light
///            \
///             \
///              \
///               \
///                \
///           --------- shape
///                 |\
///                 | \
///                 |  \
///     ------------x---x------------
///                 |<->| offset
///
/// This is not a complete physical model. For example, this does not take into
/// account the size of the shape (this function doesn't even take the shape as
/// a parameter). It's just a good enough approximation.
ui.Offset computeShadowOffset(double elevation) {
  if (elevation == 0.0) {
    return ui.Offset.zero;
  }
  // The shadow shifts opposite to the light's offset, scaled by how high the
  // shape sits relative to the light.
  return ui.Offset(
    -kLightOffsetX * elevation / kLightHeight,
    -kLightOffsetY * elevation / kLightHeight,
  );
}
/// Computes the rectangle that contains the penumbra of the shadow cast by
/// the [shape] that's elevated above the surface of the screen at [elevation].
ui.Rect computePenumbraBounds(ui.Rect shape, double elevation) {
  if (elevation == 0.0) {
    return shape;
  }
  // Tangents of the penumbra cone along the x and y axes.
  final double tangentX = (kLightRadius + shape.width * 0.5) / kLightHeight;
  final double tangentY = (kLightRadius + shape.height * 0.5) / kLightHeight;
  final double dx = elevation * tangentX;
  final double dy = elevation * tangentY;
  final ui.Rect bounds = ui.Rect.fromLTRB(
    shape.left - dx,
    shape.top - dy,
    shape.right + dx,
    shape.bottom + dy,
  ).shift(computeShadowOffset(elevation));
  // Expand the bounds rectangle to compensate for inaccuracy in the shadow
  // calculation. This is similar to a workaround that had previously been
  // used in the Flutter framework to adjust the bounds for shadows drawn
  // by Skia.
  // (See https://github.com/flutter/flutter/pull/127052)
  return bounds.inflate(20);
}
/// Information needed to render a shadow using CSS or canvas.
@immutable
class SurfaceShadowData {
  const SurfaceShadowData({
    required this.blurWidth,
    required this.offset,
  });

  /// The length in pixels of the shadow.
  ///
  /// This is different from the `sigma` used by blur filters. This value
  /// contains the entire shadow, so, for example, to compute the shadow
  /// bounds it is sufficient to add this value to the width of the shape
  /// that casts it.
  final double blurWidth;

  /// The offset of the shadow relative to the shape as computed by
  /// [computeShadowOffset].
  final ui.Offset offset;
}
/// Computes the shadow for [shape] based on its [elevation] from the surface
/// of the screen, or null when [elevation] is zero (no shadow).
///
/// The algorithm approximates the math done by the C++ implementation from
/// `physical_shape_layer.cc` but it's not exact, since on the Web we do not
/// (cannot) use Skia's shadow API directly. However, it is consistent with
/// [computePenumbraBounds] used by [RecordingCanvas] during bounds
/// estimation.
SurfaceShadowData? computeShadow(ui.Rect shape, double elevation) {
  if (elevation == 0.0) {
    return null;
  }
  final double tangentX = (kLightRadius + shape.width * 0.5) / kLightHeight;
  final double tangentY = (kLightRadius + shape.height * 0.5) / kLightHeight;
  final double penumbraWidth = elevation * tangentX;
  final double penumbraHeight = elevation * tangentY;
  return SurfaceShadowData(
    // There's no way to express different blur along different dimensions, so
    // we use the narrower of the two to prevent the shadow blur from being
    // longer than the shape itself, using min instead of the average of the
    // penumbra values.
    blurWidth: math.min(penumbraWidth, penumbraHeight),
    offset: computeShadowOffset(elevation),
  );
}
/// Applies a CSS box-shadow to [element] matching the shadow that [shape]
/// would cast at [elevation], tinted with the softened version of [color].
void applyCssShadow(
    DomElement? element, ui.Rect shape, double elevation, ui.Color color) {
  final SurfaceShadowData? shadow = computeShadow(shape, elevation);
  if (shadow == null) {
    element!.style.boxShadow = 'none';
  } else {
    final ui.Color shadowColor = toShadowColor(color);
    element!.style.boxShadow =
        '${shadow.offset.dx}px ${shadow.offset.dy}px '
        '${shadow.blurWidth}px 0px rgba(${shadowColor.red}, ${shadowColor.green}, ${shadowColor.blue}, ${shadowColor.alpha / 255})';
  }
}
/// Converts a shadow color specified by the framework to the color that should
/// actually be applied when rendering the shadow.
///
/// Flutter shadows look softer than the color specified by the developer. For
/// example, it is common to get a solid black for a shadow and see a very soft
/// shadow. This function softens the color by reducing its alpha by a constant
/// factor.
ui.Color toShadowColor(ui.Color color) {
  // Reduce alpha to make shadows less aggressive:
  //
  // - https://github.com/flutter/flutter/issues/52734
  // - https://github.com/flutter/gallery/issues/118
  final int softenedAlpha = (0.3 * color.alpha).round();
  return ui.Color(((softenedAlpha & 0xff) << 24) | (color.value & 0x00ffffff));
}
| engine/lib/web_ui/lib/src/engine/shadow.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/shadow.dart",
"repo_id": "engine",
"token_count": 1901
} | 293 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@DefaultAsset('skwasm')
library skwasm_impl;
import 'dart:ffi';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';
/// Opaque native Skia font collection; only handled by pointer on the Dart
/// side.
final class RawFontCollection extends Opaque {}
typedef FontCollectionHandle = Pointer<RawFontCollection>;

/// Opaque native Skia typeface; only handled by pointer on the Dart side.
final class RawTypeface extends Opaque {}
typedef TypefaceHandle = Pointer<RawTypeface>;

/// Creates a new font collection on the native side.
@Native<FontCollectionHandle Function()>(symbol: 'fontCollection_create', isLeaf: true)
external FontCollectionHandle fontCollectionCreate();

/// Releases the native font collection.
@Native<Void Function(FontCollectionHandle)>(symbol: 'fontCollection_dispose', isLeaf: true)
external void fontCollectionDispose(FontCollectionHandle handle);

/// Creates a typeface from raw font data.
@Native<TypefaceHandle Function(SkDataHandle)>(symbol: 'typeface_create', isLeaf: true)
external TypefaceHandle typefaceCreate(SkDataHandle fontData);

/// Releases the native typeface.
@Native<Void Function(TypefaceHandle)>(symbol: 'typeface_dispose', isLeaf: true)
external void typefaceDispose(TypefaceHandle handle);

/// Filters [codepoints] against the glyph coverage of [typefaces].
// NOTE(review): the exact in/out contract (whether the codepoint buffer is
// filtered in place and what the returned count means) is defined by the
// native `typefaces_filterCoveredCodePoints` implementation — confirm there.
@Native<Int Function(
  Pointer<TypefaceHandle>,
  Int,
  Pointer<Int32>,
  Int,
)>(symbol: 'typefaces_filterCoveredCodePoints', isLeaf: true)
external int typefacesFilterCoveredCodePoints(
  Pointer<TypefaceHandle> typefaces,
  int typefaceCount,
  Pointer<Int32> codepoints,
  int codePointCount,
);

/// Registers [typeface] with the font collection under [fontName].
@Native<Void Function(
  FontCollectionHandle,
  TypefaceHandle,
  SkStringHandle,
)>(symbol: 'fontCollection_registerTypeface', isLeaf: true)
external void fontCollectionRegisterTypeface(
  FontCollectionHandle handle,
  TypefaceHandle typeface,
  SkStringHandle fontName,
);

/// Clears the font collection's internal caches.
@Native<Void Function(
  FontCollectionHandle
)>(symbol: 'fontCollection_clearCaches', isLeaf: true)
external void fontCollectionClearCaches(FontCollectionHandle handle);
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_fonts.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_fonts.dart",
"repo_id": "engine",
"token_count": 550
} | 294 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// FFI bindings for the paragraph-builder APIs of the `skwasm` WebAssembly
// module. Each `external` function maps 1:1 onto a C symbol exported by
// skwasm.
@DefaultAsset('skwasm')
library skwasm_impl;
import 'dart:ffi';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';

/// Opaque native handle type for a skwasm paragraph builder.
final class RawParagraphBuilder extends Opaque {}
typedef ParagraphBuilderHandle = Pointer<RawParagraphBuilder>;

/// Creates a native paragraph builder configured with the given paragraph
/// style and font collection.
@Native<ParagraphBuilderHandle Function(
  ParagraphStyleHandle,
  FontCollectionHandle,
)>(symbol: 'paragraphBuilder_create', isLeaf: true)
external ParagraphBuilderHandle paragraphBuilderCreate(
  ParagraphStyleHandle styleHandle,
  FontCollectionHandle fontCollectionHandle,
);

/// Releases the native resources held by [handle].
@Native<Void Function(ParagraphBuilderHandle)>(symbol: 'paragraphBuilder_dispose', isLeaf: true)
external void paragraphBuilderDispose(ParagraphBuilderHandle handle);
/// Adds a placeholder (an inline region reserved for non-text content) to
/// the paragraph being built.
///
/// [alignment] and [baseline] are enum indices understood by the native
/// side; [baselineOffset] positions the placeholder relative to the text
/// baseline.
@Native<Void Function(
  ParagraphBuilderHandle,
  Float,
  Float,
  Int,
  Float,
  Int,
)>(symbol: 'paragraphBuilder_addPlaceholder', isLeaf: true)
external void paragraphBuilderAddPlaceholder(
  ParagraphBuilderHandle handle,
  double width,
  double height,
  int alignment,
  // Fixed typo in the parameter name (was `baslineOffset`). Positional
  // parameter, so no call sites are affected.
  double baselineOffset,
  int baseline,
);
/// Appends UTF-16 [text] to the paragraph using the current style.
@Native<Void Function(
  ParagraphBuilderHandle,
  SkString16Handle,
)>(symbol: 'paragraphBuilder_addText', isLeaf: true)
external void paragraphBuilderAddText(
  ParagraphBuilderHandle handle,
  SkString16Handle text,
);

/// Returns a pointer to the builder's accumulated text encoded as UTF-8,
/// writing the byte length into [outSize].
@Native<Pointer<Uint8> Function(
  ParagraphBuilderHandle,
  Pointer<Uint32>
)>(symbol: 'paragraphBuilder_getUtf8Text', isLeaf: true)
external Pointer<Uint8> paragraphBuilderGetUtf8Text(
  ParagraphBuilderHandle handle,
  Pointer<Uint32> outSize
);

/// Pushes [styleHandle] onto the builder's style stack; it applies to all
/// subsequently added text until popped.
@Native<Void Function(
  ParagraphBuilderHandle,
  TextStyleHandle,
)>(symbol: 'paragraphBuilder_pushStyle', isLeaf: true)
external void paragraphBuilderPushStyle(
  ParagraphBuilderHandle handle,
  TextStyleHandle styleHandle,
);

/// Pops the most recently pushed style off the builder's style stack.
@Native<Void Function(ParagraphBuilderHandle)>(symbol: 'paragraphBuilder_pop', isLeaf: true)
external void paragraphBuilderPop(ParagraphBuilderHandle handle);

/// Builds the final paragraph from the accumulated text and styles.
@Native<ParagraphHandle Function(ParagraphBuilderHandle)>(symbol: 'paragraphBuilder_build', isLeaf: true)
external ParagraphHandle paragraphBuilderBuild(ParagraphBuilderHandle handle);

// The buffers below carry precomputed Unicode break data (grapheme, word,
// and line breaks) from Dart over to the native side.

/// Allocates a native buffer of [size] UTF-16 break positions.
@Native<UnicodePositionBufferHandle Function(Size)>(
    symbol: 'unicodePositionBuffer_create', isLeaf: true)
external UnicodePositionBufferHandle unicodePositionBufferCreate(int size);

/// Returns a pointer to the buffer's backing storage for writing positions.
@Native<Pointer<Uint32> Function(UnicodePositionBufferHandle)>(
    symbol: 'unicodePositionBuffer_getDataPointer', isLeaf: true)
external Pointer<Uint32> unicodePositionBufferGetDataPointer(UnicodePositionBufferHandle handle);

/// Frees a position buffer previously created with
/// [unicodePositionBufferCreate].
@Native<Void Function(UnicodePositionBufferHandle)>(
    symbol: 'unicodePositionBuffer_free', isLeaf: true)
external void unicodePositionBufferFree(UnicodePositionBufferHandle handle);

/// Allocates a native buffer of [size] line-break records.
@Native<LineBreakBufferHandle Function(Size)>(
    symbol: 'lineBreakBuffer_create', isLeaf: true)
external LineBreakBufferHandle lineBreakBufferCreate(int size);

/// Returns a pointer to the buffer's backing storage for writing breaks.
@Native<Pointer<LineBreak> Function(LineBreakBufferHandle)>(
    symbol: 'lineBreakBuffer_getDataPointer', isLeaf: true)
external Pointer<LineBreak> lineBreakBufferGetDataPointer(LineBreakBufferHandle handle);

/// Frees a line-break buffer previously created with [lineBreakBufferCreate].
@Native<Void Function(LineBreakBufferHandle)>(
    symbol: 'lineBreakBuffer_free', isLeaf: true)
external void lineBreakBufferFree(LineBreakBufferHandle handle);

/// Supplies precomputed grapheme-break positions (UTF-16 offsets) to the
/// builder.
@Native<Void Function(ParagraphBuilderHandle, UnicodePositionBufferHandle)>(
    symbol: 'paragraphBuilder_setGraphemeBreaksUtf16', isLeaf: true)
external void paragraphBuilderSetGraphemeBreaksUtf16(
  ParagraphBuilderHandle handle,
  UnicodePositionBufferHandle positionBuffer,
);

/// Supplies precomputed word-break positions (UTF-16 offsets) to the builder.
@Native<Void Function(ParagraphBuilderHandle, UnicodePositionBufferHandle)>(
    symbol: 'paragraphBuilder_setWordBreaksUtf16', isLeaf: true)
external void paragraphBuilderSetWordBreaksUtf16(
  ParagraphBuilderHandle handle,
  UnicodePositionBufferHandle positionBuffer,
);

/// Supplies precomputed line-break records (UTF-16 offsets) to the builder.
@Native<Void Function(ParagraphBuilderHandle, LineBreakBufferHandle)>(
    symbol: 'paragraphBuilder_setLineBreaksUtf16', isLeaf: true)
external void paragraphBuilderSetLineBreaksUtf16(
  ParagraphBuilderHandle handle,
  LineBreakBufferHandle positionBuffer,
);
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph_builder.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph_builder.dart",
"repo_id": "engine",
"token_count": 1207
} | 295 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:ui/ui.dart' as ui;
import '../util.dart';
import 'canvas_paragraph.dart';
import 'fragmenter.dart';
import 'layout_service.dart';
import 'line_breaker.dart';
import 'paragraph.dart';
import 'text_direction.dart';
/// Splits [text] into fragments that are ready to be laid out by
/// [TextLayoutService].
///
/// This fragmenter takes into account line breaks, directionality and styles.
class LayoutFragmenter extends TextFragmenter {
  const LayoutFragmenter(super.text, this.paragraphSpans);

  /// The style spans of the paragraph, in text order.
  final List<ParagraphSpan> paragraphSpans;

  @override
  List<LayoutFragment> fragment() {
    final List<LayoutFragment> fragments = <LayoutFragment>[];
    int fragmentStart = 0;
    // Three independent fragmentations are merged here: line-break
    // fragments, bidi (text direction) fragments, and the paragraph's style
    // spans. Each emitted LayoutFragment ends at the nearest boundary of any
    // of the three, so no fragment ever straddles a break, direction, or
    // style change.
    final Iterator<LineBreakFragment> lineBreakFragments = LineBreakFragmenter(text).fragment().iterator..moveNext();
    final Iterator<BidiFragment> bidiFragments = BidiFragmenter(text).fragment().iterator..moveNext();
    final Iterator<ParagraphSpan> spans = paragraphSpans.iterator..moveNext();
    LineBreakFragment currentLineBreakFragment = lineBreakFragments.current;
    BidiFragment currentBidiFragment = bidiFragments.current;
    ParagraphSpan currentSpan = spans.current;
    while (true) {
      // The next fragment boundary is the closest end among the three
      // current pieces.
      final int fragmentEnd = math.min(
        currentLineBreakFragment.end,
        math.min(
          currentBidiFragment.end,
          currentSpan.end,
        ),
      );
      final int distanceFromLineBreak = currentLineBreakFragment.end - fragmentEnd;
      // Only a fragment that ends exactly at a line-break boundary carries
      // the break type; fragments cut short by a bidi or span boundary are
      // prohibited break points.
      final LineBreakType lineBreakType = distanceFromLineBreak == 0
          ? currentLineBreakFragment.type
          : LineBreakType.prohibited;
      // Trailing newline/space counts are measured from the end of the
      // line-break fragment; shift them by how far this fragment falls short
      // of that end, then clamp them into this fragment's length below.
      final int trailingNewlines = currentLineBreakFragment.trailingNewlines - distanceFromLineBreak;
      final int trailingSpaces = currentLineBreakFragment.trailingSpaces - distanceFromLineBreak;
      final int fragmentLength = fragmentEnd - fragmentStart;
      fragments.add(LayoutFragment(
        fragmentStart,
        fragmentEnd,
        lineBreakType,
        currentBidiFragment.textDirection,
        currentBidiFragment.fragmentFlow,
        currentSpan,
        trailingNewlines: clampInt(trailingNewlines, 0, fragmentLength),
        trailingSpaces: clampInt(trailingSpaces, 0, fragmentLength),
      ));
      fragmentStart = fragmentEnd;
      // Advance whichever of the three iterators has been fully consumed up
      // to `fragmentEnd`.
      bool moved = false;
      if (currentLineBreakFragment.end == fragmentEnd) {
        if (lineBreakFragments.moveNext()) {
          moved = true;
          currentLineBreakFragment = lineBreakFragments.current;
        }
      }
      if (currentBidiFragment.end == fragmentEnd) {
        if (bidiFragments.moveNext()) {
          moved = true;
          currentBidiFragment = bidiFragments.current;
        }
      }
      if (currentSpan.end == fragmentEnd) {
        if (spans.moveNext()) {
          moved = true;
          currentSpan = spans.current;
        }
      }
      // Once we reached the end of all fragments, exit the loop.
      if (!moved) {
        break;
      }
    }
    return fragments;
  }
}
/// A text fragment that combines line-break, bidi, and span information.
abstract class _CombinedFragment extends TextFragment {
  _CombinedFragment(
    super.start,
    super.end,
    this.type,
    this._textDirection,
    this.fragmentFlow,
    this.span, {
    required this.trailingNewlines,
    required this.trailingSpaces,
  }) : assert(trailingNewlines >= 0),
       // Newlines are counted among trailing spaces, so the space count can
       // never be smaller than the newline count.
       assert(trailingSpaces >= trailingNewlines);

  /// The type of line break at the end of this fragment.
  final LineBreakType type;

  /// The resolved text direction of this fragment, if known.
  ///
  /// May be null for direction-neutral fragments; `_FragmentPosition
  /// .setPosition` fills it in later in that case.
  ui.TextDirection? get textDirection => _textDirection;
  ui.TextDirection? _textDirection;

  final FragmentFlow fragmentFlow;

  /// The paragraph span this fragment belongs to.
  final ParagraphSpan span;

  /// Number of trailing newline code units in this fragment.
  final int trailingNewlines;

  /// Number of trailing whitespace code units (including newlines).
  final int trailingSpaces;

  @override
  int get hashCode => Object.hash(
    start,
    end,
    type,
    textDirection,
    fragmentFlow,
    span,
    trailingNewlines,
    trailingSpaces,
  );

  @override
  bool operator ==(Object other) {
    // NOTE(review): this checks `other is LayoutFragment` rather than
    // `_CombinedFragment`; all concrete subtypes in this file derive from
    // LayoutFragment, so this holds today — confirm if new subtypes are
    // added.
    return other is LayoutFragment &&
        other.start == start &&
        other.end == end &&
        other.type == type &&
        other.textDirection == textDirection &&
        other.fragmentFlow == fragmentFlow &&
        other.span == span &&
        other.trailingNewlines == trailingNewlines &&
        other.trailingSpaces == trailingSpaces;
  }
}
class LayoutFragment extends _CombinedFragment with _FragmentMetrics, _FragmentPosition, _FragmentBox {
  LayoutFragment(
    super.start,
    super.end,
    super.type,
    super.textDirection,
    super.fragmentFlow,
    super.span, {
    required super.trailingNewlines,
    required super.trailingSpaces,
  });

  /// Length of this fragment in code units.
  int get length => end - start;

  /// Whether the fragment consists entirely of whitespace.
  bool get isSpaceOnly => length == trailingSpaces;

  /// Whether this fragment represents an inline placeholder.
  bool get isPlaceholder => span is PlaceholderSpan;

  /// Whether a line is allowed to break after this fragment.
  bool get isBreak => type != LineBreakType.prohibited;

  /// Whether a line MUST break after this fragment (newline or end of text).
  bool get isHardBreak => type == LineBreakType.mandatory || type == LineBreakType.endOfText;

  /// The text style of the span this fragment belongs to.
  EngineTextStyle get style => span.style;

  /// Returns the substring from [paragraph] that corresponds to this fragment,
  /// excluding new line characters.
  String getText(CanvasParagraph paragraph) {
    return paragraph.plainText.substring(start, end - trailingNewlines);
  }

  /// Splits this fragment into two fragments with the split point being the
  /// given [index].
  // TODO(mdebbar): If we ever get multiple return values in Dart, we should use it!
  // See: https://github.com/dart-lang/language/issues/68
  List<LayoutFragment?> split(int index) {
    assert(start <= index);
    assert(index <= end);
    // A split at either edge yields the original fragment plus a null.
    if (start == index) {
      return <LayoutFragment?>[null, this];
    }
    if (end == index) {
      return <LayoutFragment?>[this, null];
    }
    // The length of the second fragment after the split.
    final int secondLength = end - index;
    // Trailing spaces/new lines go to the second fragment. Any left over goes
    // to the first fragment.
    final int secondTrailingNewlines = math.min(trailingNewlines, secondLength);
    final int secondTrailingSpaces = math.min(trailingSpaces, secondLength);
    return <LayoutFragment>[
      // The first half can never end at a break opportunity (the break, if
      // any, belongs to the second half).
      LayoutFragment(
        start,
        index,
        LineBreakType.prohibited,
        textDirection,
        fragmentFlow,
        span,
        trailingNewlines: trailingNewlines - secondTrailingNewlines,
        trailingSpaces: trailingSpaces - secondTrailingSpaces,
      ),
      LayoutFragment(
        index,
        end,
        type,
        textDirection,
        fragmentFlow,
        span,
        trailingNewlines: secondTrailingNewlines,
        trailingSpaces: secondTrailingSpaces,
      ),
    ];
  }

  @override
  String toString() {
    return '$LayoutFragment($start, $end, $type, $textDirection)';
  }
}
/// Holds the measured font metrics of a fragment, populated by [setMetrics].
mixin _FragmentMetrics on _CombinedFragment {
  // The spanometer that produced these metrics; reused later for
  // sub-fragment measurements (e.g. in `_FragmentBox._intersect`).
  late Spanometer _spanometer;

  /// The rise from the baseline as calculated from the font and style for this text.
  double get ascent => _ascent;
  late double _ascent;

  /// The drop from the baseline as calculated from the font and style for this text.
  double get descent => _descent;
  late double _descent;

  /// The width of the measured text, not including trailing spaces.
  double get widthExcludingTrailingSpaces => _widthExcludingTrailingSpaces;
  late double _widthExcludingTrailingSpaces;

  /// The width of the measured text, including any trailing spaces.
  double get widthIncludingTrailingSpaces => _widthIncludingTrailingSpaces + _extraWidthForJustification;
  late double _widthIncludingTrailingSpaces;

  // Extra width added by `_FragmentPosition.justifyTo` when the paragraph is
  // justified; zero otherwise.
  double _extraWidthForJustification = 0.0;

  /// The total height as calculated from the font and style for this text.
  double get height => ascent + descent;

  /// The width occupied by this fragment's trailing spaces alone.
  double get widthOfTrailingSpaces => widthIncludingTrailingSpaces - widthExcludingTrailingSpaces;

  /// Set measurement values for the fragment.
  void setMetrics(Spanometer spanometer, {
    required double ascent,
    required double descent,
    required double widthExcludingTrailingSpaces,
    required double widthIncludingTrailingSpaces,
  }) {
    _spanometer = spanometer;
    _ascent = ascent;
    _descent = descent;
    _widthExcludingTrailingSpaces = widthExcludingTrailingSpaces;
    _widthIncludingTrailingSpaces = widthIncludingTrailingSpaces;
  }
}
/// Encapsulates positioning of the fragment relative to the line.
///
/// The coordinates are all relative to the line it belongs to. For example,
/// [left] is the distance from the left edge of the line to the left edge of
/// the fragment.
///
/// This is what the various measurements/coordinates look like for a fragment
/// in an LTR paragraph:
///
/// *------------------------line.width-----------------*
/// *---width----*
/// ┌─────────────────┬────────────┬────────────────────┐
/// │ │--FRAGMENT--│ │
/// └─────────────────┴────────────┴────────────────────┘
/// *---startOffset---*
/// *------left-------*
/// *--------endOffset-------------*
/// *----------right---------------*
///
///
/// And in an RTL paragraph, [startOffset] and [endOffset] are flipped because
/// the line starts from the right. Here's what they look like:
///
/// *------------------------line.width-----------------*
/// *---width----*
/// ┌─────────────────┬────────────┬────────────────────┐
/// │ │--FRAGMENT--│ │
/// └─────────────────┴────────────┴────────────────────┘
/// *----startOffset-----*
/// *------left-------*
/// *-----------endOffset-------------*
/// *----------right---------------*
///
mixin _FragmentPosition on _CombinedFragment, _FragmentMetrics {
  /// The distance from the beginning of the line to the beginning of the fragment.
  double get startOffset => _startOffset;
  late double _startOffset;

  /// The line that contains this fragment.
  // (Previous comment incorrectly described this field as "the width of the
  // line".)
  late ParagraphLine line;

  /// The distance from the beginning of the line to the end of the fragment.
  double get endOffset => startOffset + widthIncludingTrailingSpaces;

  /// The distance from the left edge of the line to the left edge of the fragment.
  double get left => line.textDirection == ui.TextDirection.ltr
      ? startOffset
      : line.width - endOffset;

  /// The distance from the left edge of the line to the right edge of the fragment.
  double get right => line.textDirection == ui.TextDirection.ltr
      ? endOffset
      : line.width - startOffset;

  /// Set the horizontal position of this fragment relative to the [line] that
  /// contains it.
  void setPosition({
    required double startOffset,
    required ui.TextDirection textDirection,
  }) {
    _startOffset = startOffset;
    // Direction-neutral fragments inherit the direction they are positioned
    // with; fragments with a resolved direction keep it.
    _textDirection ??= textDirection;
  }

  /// Adjust the width of this fragment for paragraph justification.
  void justifyTo({required double paragraphWidth}) {
    // Only justify this fragment if it's not a trailing space in the line.
    if (end > line.endIndex - line.trailingSpaces) {
      // Don't justify fragments that are part of trailing spaces of the line.
      return;
    }
    if (trailingSpaces == 0) {
      // If this fragment has no spaces, there's nothing to justify.
      return;
    }
    // Distribute the leftover line width evenly across all non-trailing
    // spaces; this fragment absorbs a share proportional to its own spaces.
    final double justificationTotal = paragraphWidth - line.width;
    final double justificationPerSpace = justificationTotal / line.nonTrailingSpaces;
    _extraWidthForJustification = justificationPerSpace * trailingSpaces;
  }
}
/// Encapsulates calculations related to the bounding box of the fragment
/// relative to the paragraph.
mixin _FragmentBox on _CombinedFragment, _FragmentMetrics, _FragmentPosition {
  /// Top of the fragment box relative to the paragraph.
  double get top => line.baseline - ascent;

  /// Bottom of the fragment box relative to the paragraph.
  double get bottom => line.baseline + descent;

  // Cached box for the whole fragment (in paragraph coordinates), including
  // trailing spaces. Computed lazily once position and metrics are set.
  late final ui.TextBox _textBoxIncludingTrailingSpaces = ui.TextBox.fromLTRBD(
    line.left + left,
    top,
    line.left + right,
    bottom,
    textDirection!,
  );

  /// Whether or not the trailing spaces of this fragment are part of trailing
  /// spaces of the line containing the fragment.
  bool get _isPartOfTrailingSpacesInLine => end > line.endIndex - line.trailingSpaces;

  /// Returns a [ui.TextBox] for the purpose of painting this fragment.
  ///
  /// The coordinates of the resulting [ui.TextBox] are relative to the
  /// paragraph, not to the line.
  ///
  /// Trailing spaces in each line aren't painted on the screen, so they are
  /// excluded from the resulting text box.
  ui.TextBox toPaintingTextBox() {
    if (_isPartOfTrailingSpacesInLine) {
      // For painting, we exclude the width of trailing spaces from the box.
      // In LTR the spaces are at the right edge; in RTL they are at the left.
      return textDirection! == ui.TextDirection.ltr
          ? ui.TextBox.fromLTRBD(
              line.left + left,
              top,
              line.left + right - widthOfTrailingSpaces,
              bottom,
              textDirection!,
            )
          : ui.TextBox.fromLTRBD(
              line.left + left + widthOfTrailingSpaces,
              top,
              line.left + right,
              bottom,
              textDirection!,
            );
    }
    return _textBoxIncludingTrailingSpaces;
  }

  /// Returns a [ui.TextBox] representing this fragment.
  ///
  /// The coordinates of the resulting [ui.TextBox] are relative to the
  /// paragraph, not to the line.
  ///
  /// As opposed to [toPaintingTextBox], the resulting text box from this method
  /// includes trailing spaces of the fragment.
  ui.TextBox toTextBox({
    int? start,
    int? end,
  }) {
    start ??= this.start;
    end ??= this.end;
    // If the requested range covers the whole fragment (minus trailing
    // newlines), the cached full box can be returned directly.
    if (start <= this.start && end >= this.end - trailingNewlines) {
      return _textBoxIncludingTrailingSpaces;
    }
    return _intersect(start, end);
  }

  /// Performs the intersection of this fragment with the range given by [start] and
  /// [end] indices, and returns a [ui.TextBox] representing that intersection.
  ///
  /// The coordinates of the resulting [ui.TextBox] are relative to the
  /// paragraph, not to the line.
  ui.TextBox _intersect(int start, int end) {
    // `_intersect` should only be called when there's an actual intersection.
    assert(start > this.start || end < this.end);
    final double before;
    if (start <= this.start) {
      before = 0.0;
    } else {
      _spanometer.currentSpan = span;
      before = _spanometer.measureRange(this.start, start);
    }
    final double after;
    if (end >= this.end - trailingNewlines) {
      after = 0.0;
    } else {
      _spanometer.currentSpan = span;
      after = _spanometer.measureRange(end, this.end - trailingNewlines);
    }
    final double left, right;
    if (textDirection! == ui.TextDirection.ltr) {
      // Example: let's say the text is "Loremipsum" and we want to get the box
      // for "rem". In this case, `before` is the width of "Lo", and `after`
      // is the width of "ipsum".
      //
      // Here's how the measurements/coordinates look like:
      //
      //                  before         after
      //                |----|           |----------|
      //                +---------------------+
      //                | L o r e m i p s u m |
      //                +---------------------+
      //    this.left ^                       ^ this.right
      left = this.left + before;
      right = this.right - after;
    } else {
      // Example: let's say the text is "txet_werbeH" ("Hebrew_text" flowing from
      // right to left). Say we want to get the box for "brew". The `before` is
      // the width of "He", and `after` is the width of "_text".
      //
      //                 after           before
      //                |----------|         |----|
      //                +-----------------------+
      //                | t x e t _ w e r b e H |
      //                +-----------------------+
      //    this.left ^                         ^ this.right
      //
      // Notice how `before` and `after` are reversed in the RTL example. That's
      // because the text flows from right to left.
      left = this.left + after;
      right = this.right - before;
    }
    // The fragment's left and right edges are relative to the line. In order
    // to make them relative to the paragraph, we need to add the left edge of
    // the line.
    return ui.TextBox.fromLTRBD(
      line.left + left,
      top,
      line.left + right,
      bottom,
      textDirection!,
    );
  }

  /// Returns the text position within this fragment's range that's closest to
  /// the given [x] offset.
  ///
  /// The [x] offset is expected to be relative to the left edge of the fragment.
  ui.TextPosition getPositionForX(double x) {
    x = _makeXDirectionAgnostic(x);
    final int startIndex = start;
    final int endIndex = end - trailingNewlines;
    // Check some special cases to return the result quicker.
    final int length = endIndex - startIndex;
    if (length == 0) {
      return ui.TextPosition(offset: startIndex);
    }
    if (length == 1) {
      // Find out if `x` is closer to `startIndex` or `endIndex`.
      final double distanceFromStart = x;
      final double distanceFromEnd = widthIncludingTrailingSpaces - x;
      return distanceFromStart < distanceFromEnd
          ? ui.TextPosition(offset: startIndex)
          : ui.TextPosition(offset: endIndex, affinity: ui.TextAffinity.upstream,);
    }
    _spanometer.currentSpan = span;
    // The resulting `cutoff` is the index of the character where the `x` offset
    // falls. We should return the text position of either `cutoff` or
    // `cutoff + 1` depending on which one `x` is closer to.
    //
    //   offset x
    //      ↓
    // "A B C D E F"
    //     ↑
    //   cutoff
    final int cutoff = _spanometer.forceBreak(
      startIndex,
      endIndex,
      availableWidth: x,
      allowEmpty: true,
    );
    if (cutoff == endIndex) {
      return ui.TextPosition(
        offset: cutoff,
        affinity: ui.TextAffinity.upstream,
      );
    }
    final double lowWidth = _spanometer.measureRange(startIndex, cutoff);
    final double highWidth = _spanometer.measureRange(startIndex, cutoff + 1);
    // See if `x` is closer to `cutoff` or `cutoff + 1`.
    if (x - lowWidth < highWidth - x) {
      // The offset is closer to cutoff.
      return ui.TextPosition(offset: cutoff);
    } else {
      // The offset is closer to cutoff + 1.
      return ui.TextPosition(
        offset: cutoff + 1,
        affinity: ui.TextAffinity.upstream,
      );
    }
  }

  /// Transforms the [x] coordinate to be direction-agnostic.
  ///
  /// The X (input) is relative to the [left] edge of the fragment, and this
  /// method returns an X' (output) that's relative to beginning of the text.
  ///
  /// Here's how it looks for a fragment with LTR content:
  ///
  ///         *------------------------line width------------------*
  ///                     *-----X (input)
  ///         ┌───────────┬────────────────────────┬───────────────┐
  ///         │           │ ---text-direction----> │               │
  ///         └───────────┴────────────────────────┴───────────────┘
  ///                     *-----X' (output)
  ///         *---left----*
  ///         *---------------right----------------*
  ///
  ///
  /// And here's how it looks for a fragment with RTL content:
  ///
  ///         *------------------------line width------------------*
  ///                     *-----X (input)
  ///         ┌───────────┬────────────────────────┬───────────────┐
  ///         │           │ <---text-direction---- │               │
  ///         └───────────┴────────────────────────┴───────────────┘
  ///                 (output) X'-----------------*
  ///         *---left----*
  ///         *---------------right----------------*
  ///
  double _makeXDirectionAgnostic(double x) {
    if (textDirection == ui.TextDirection.rtl) {
      return widthIncludingTrailingSpaces - x;
    }
    return x;
  }

  // [start, end).map((index) => line.graphemeStarts[index]) gives an ascending
  // list of UTF16 offsets of graphemes that start in this fragment.
  //
  // Returns null if this fragment contains no grapheme starts.
  late final (int, int)? graphemeStartIndexRange = _getBreaksRange();

  (int, int)? _getBreaksRange() {
    if (end == start) {
      return null;
    }
    final List<int> lineGraphemeBreaks = line.graphemeStarts;
    assert(end > start);
    assert(line.graphemeStarts.isNotEmpty);
    final int startIndex = line.graphemeStartIndexBefore(start, 0, lineGraphemeBreaks.length);
    final int endIndex = end == start + 1
        ? startIndex + 1
        : line.graphemeStartIndexBefore(end - 1, startIndex, lineGraphemeBreaks.length) + 1;
    final int firstGraphemeStart = lineGraphemeBreaks[startIndex];
    // If the first grapheme start found actually lies after `start`, the
    // fragment begins mid-grapheme; exclude that leading partial grapheme.
    return firstGraphemeStart > start
        ? (endIndex == startIndex + 1 ? null : (startIndex + 1, endIndex))
        : (startIndex, endIndex);
  }

  /// Whether the first codepoint of this fragment is not a valid grapheme
  /// start, and belongs in the previous fragment.
  ///
  /// This is the result of a known bug: in rare circumstances, a grapheme is
  /// split into different fragments. To workaround this we ignore the trailing
  /// part of the grapheme during hit-testing, by adjusting the leading offset of
  /// a fragment to the leading edge of the first grapheme start in that fragment.
  //
  // TODO(LongCatIsLooong): Grapheme clusters should not be separated even
  // when they are in different runs. Also document the recommendation to use
  // U+25CC or U+00A0 for showing nonspacing marks in isolation.
  bool get hasLeadingBrokenGrapheme {
    final int? graphemeStartIndexRangeStart = graphemeStartIndexRange?.$1;
    return graphemeStartIndexRangeStart == null || line.graphemeStarts[graphemeStartIndexRangeStart] != start;
  }

  /// Returns the GlyphInfo within the range [line.graphemeStarts[startIndex], line.graphemeStarts[endIndex]),
  /// that's visually closest to the given horizontal offset `x` (in the paragraph's coordinates).
  ui.GlyphInfo _getClosestCharacterInRange(double x, int startIndex, int endIndex) {
    final List<int> graphemeStartIndices = line.graphemeStarts;
    final ui.TextRange fullRange = ui.TextRange(start: graphemeStartIndices[startIndex], end: graphemeStartIndices[endIndex]);
    final ui.TextBox fullBox = toTextBox(start: fullRange.start, end: fullRange.end);
    // Base case: a single grapheme.
    if (startIndex + 1 == endIndex) {
      return ui.GlyphInfo(fullBox.toRect(), fullRange, fullBox.direction);
    }
    assert(startIndex + 1 < endIndex);
    final ui.TextBox(:double left, :double right) = fullBox;
    // The toTextBox call is potentially expensive so we'll try reducing the
    // search steps with a binary search.
    //
    // x ∈ (left, right),
    if (left < x && x < right) {
      final int midIndex = (startIndex + endIndex) ~/ 2;
      // endIndex >= startIndex + 2, so midIndex >= start + 1
      final ui.GlyphInfo firstHalf = _getClosestCharacterInRange(x, startIndex, midIndex);
      if (firstHalf.graphemeClusterLayoutBounds.left < x && x < firstHalf.graphemeClusterLayoutBounds.right) {
        return firstHalf;
      }
      // startIndex <= endIndex - 2, so midIndex <= endIndex - 1
      final ui.GlyphInfo secondHalf = _getClosestCharacterInRange(x, midIndex, endIndex);
      if (secondHalf.graphemeClusterLayoutBounds.left < x && x < secondHalf.graphemeClusterLayoutBounds.right) {
        return secondHalf;
      }
      // Neither box clips the given x. This is supposed to be rare.
      final double distanceToFirst = (x - x.clamp(firstHalf.graphemeClusterLayoutBounds.left, firstHalf.graphemeClusterLayoutBounds.right)).abs();
      final double distanceToSecond = (x - x.clamp(secondHalf.graphemeClusterLayoutBounds.left, secondHalf.graphemeClusterLayoutBounds.right)).abs();
      return distanceToFirst > distanceToSecond ? firstHalf : secondHalf;
    }
    // x ∉ (left, right), it's either the first character or the last, since
    // there can only be one writing direction in the fragment.
    final ui.TextRange range = switch ((fullBox.direction, x <= left)) {
      (ui.TextDirection.ltr, true) || (ui.TextDirection.rtl, false) => ui.TextRange(
        start: graphemeStartIndices[startIndex],
        end: graphemeStartIndices[startIndex + 1],
      ),
      (ui.TextDirection.ltr, false) || (ui.TextDirection.rtl, true) => ui.TextRange(
        start: graphemeStartIndices[endIndex - 1],
        end: graphemeStartIndices[endIndex],
      ),
    };
    assert(!range.isCollapsed);
    final ui.TextBox box = toTextBox(start: range.start, end: range.end);
    return ui.GlyphInfo(box.toRect(), range, box.direction);
  }

  /// Returns the GlyphInfo of the character in the fragment that is closest to
  /// the given offset x.
  ui.GlyphInfo getClosestCharacterBox(double x) {
    assert(end > start);
    assert(graphemeStartIndexRange != null);
    // The force ! is safe here because this method is only called by
    // LayoutService.getClosestGlyphInfo which checks this fragment has at least
    // one grapheme start before calling this method.
    final (int rangeStart, int rangeEnd) = graphemeStartIndexRange!;
    return _getClosestCharacterInRange(x, rangeStart, rangeEnd);
  }
}
/// A zero-length fragment representing the overflow ellipsis at the end of a
/// line.
class EllipsisFragment extends LayoutFragment {
  EllipsisFragment(
    int index,
    ParagraphSpan span,
  ) : super(
        // Zero-length range: the ellipsis occupies no characters of the
        // paragraph's text.
        index,
        index,
        LineBreakType.endOfText,
        null,
        // The ellipsis is always at the end of the line, so it can't be
        // sandwiched. This means it'll always follow the paragraph direction.
        FragmentFlow.sandwich,
        span,
        trailingNewlines: 0,
        trailingSpaces: 0,
      );

  @override
  bool get isSpaceOnly => false;

  @override
  bool get isPlaceholder => false;

  /// Returns the paragraph's configured ellipsis string instead of a
  /// substring of the text.
  @override
  String getText(CanvasParagraph paragraph) {
    return paragraph.paragraphStyle.ellipsis!;
  }

  @override
  List<LayoutFragment> split(int index) {
    throw Exception('Cannot split an EllipsisFragment');
  }
}
| engine/lib/web_ui/lib/src/engine/text/layout_fragmenter.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/text/layout_fragmenter.dart",
"repo_id": "engine",
"token_count": 9649
} | 296 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../browser_detection.dart';
import '../dom.dart';
/// Controls the capitalization of the text.
///
/// This corresponds to Flutter's [TextCapitalization].
///
/// Uses `text-transform` css property.
/// See: https://developer.mozilla.org/en-US/docs/Web/CSS/text-transform
enum TextCapitalization {
  /// Uppercase for the first letter of each word.
  words,

  /// Currently not implemented on Flutter Web. Uppercase for the first letter
  /// of each sentence.
  sentences,

  /// Uppercase for each letter.
  characters,

  /// No capitalization is applied; maps to `autocapitalize="off"`.
  // (Previous doc said "Lowercase for each letter", which was inaccurate —
  // text is left exactly as typed, not lowercased.)
  none,
}
/// Helper class for text capitalization.
///
/// Uses `autocapitalize` attribute on input element.
/// See: https://developers.google.com/web/updates/2015/04/autocapitalize
/// https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/autocapitalize
/// Helper class for text capitalization.
///
/// Uses the `autocapitalize` attribute on input elements.
/// See: https://developers.google.com/web/updates/2015/04/autocapitalize
///      https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/autocapitalize
class TextCapitalizationConfig {
  /// Creates a configuration that applies no capitalization.
  const TextCapitalizationConfig.defaultCapitalization()
      : textCapitalization = TextCapitalization.none;

  /// Parses the `TextCapitalization.*` enum string sent from the framework
  /// into a [TextCapitalization] value.
  ///
  /// Unrecognized values fall back to [TextCapitalization.none].
  const TextCapitalizationConfig.fromInputConfiguration(String inputConfiguration)
      : textCapitalization =
            inputConfiguration == 'TextCapitalization.words'
                ? TextCapitalization.words
                : inputConfiguration == 'TextCapitalization.characters'
                    ? TextCapitalization.characters
                    : inputConfiguration == 'TextCapitalization.sentences'
                        ? TextCapitalization.sentences
                        : TextCapitalization.none;

  /// The capitalization behavior to apply to the input element.
  final TextCapitalization textCapitalization;

  /// Sets the `autocapitalize` attribute on input elements.
  ///
  /// This attribute is only available for mobile browsers.
  ///
  /// Note that in mobile browsers the onscreen keyboards provide sentence
  /// level capitalization as default as opposed to no capitalization on
  /// desktop browsers.
  ///
  /// See: https://developers.google.com/web/updates/2015/04/autocapitalize
  ///      https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/autocapitalize
  void setAutocapitalizeAttribute(DomHTMLElement domElement) {
    // The switch is exhaustive over the enum, so the previous redundant
    // `default:` label and `break;` have been removed; `autocapitalize` is
    // definitely assigned on every path.
    final String autocapitalize;
    switch (textCapitalization) {
      case TextCapitalization.words:
        // TODO(mdebbar): There is a bug for `words` level capitalization in IOS now.
        // For now go back to default. Remove the check after bug is resolved.
        // https://bugs.webkit.org/show_bug.cgi?id=148504
        autocapitalize =
            browserEngine == BrowserEngine.webkit ? 'sentences' : 'words';
      case TextCapitalization.characters:
        autocapitalize = 'characters';
      case TextCapitalization.sentences:
        autocapitalize = 'sentences';
      case TextCapitalization.none:
        autocapitalize = 'off';
    }
    // Only <input> and <textarea> elements support the attribute.
    if (domInstanceOfString(domElement, 'HTMLInputElement')) {
      final DomHTMLInputElement element = domElement as DomHTMLInputElement;
      element.setAttribute('autocapitalize', autocapitalize);
    } else if (domInstanceOfString(domElement, 'HTMLTextAreaElement')) {
      final DomHTMLTextAreaElement element =
          domElement as DomHTMLTextAreaElement;
      element.setAttribute('autocapitalize', autocapitalize);
    }
  }
}
| engine/lib/web_ui/lib/src/engine/text_editing/text_capitalization.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/text_editing/text_capitalization.dart",
"repo_id": "engine",
"token_count": 1189
} | 297 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:meta/meta.dart';
import '../browser_detection.dart';
import '../dom.dart';
import '../text_editing/text_editing.dart';
import 'dom_manager.dart';
/// Manages the CSS styles of the Flutter View.
/// Manages the CSS styles of the Flutter View.
class StyleManager {
  static const String defaultFontStyle = 'normal';
  static const String defaultFontWeight = 'normal';
  static const double defaultFontSize = 14.0;
  static const String defaultFontFamily = 'sans-serif';

  /// The default CSS `font` shorthand assembled from the constants above.
  static const String defaultCssFont = '$defaultFontStyle $defaultFontWeight ${defaultFontSize}px $defaultFontFamily';

  /// Creates a `<style>` element carrying the global Flutter view CSS rules
  /// and attaches it under [node].
  ///
  /// [styleNonce] is forwarded to the style element for CSP compliance;
  /// [cssSelectorPrefix] scopes the rules (e.g. to a specific host element).
  static void attachGlobalStyles({
    required DomNode node,
    required String styleId,
    required String? styleNonce,
    required String cssSelectorPrefix,
  }) {
    final DomHTMLStyleElement styleElement = createDomHTMLStyleElement(styleNonce);
    styleElement.id = styleId;
    // The style element must be appended to the DOM, or its `sheet` will be null later.
    node.appendChild(styleElement);
    applyGlobalCssRulesToSheet(
      styleElement,
      defaultCssFont: StyleManager.defaultCssFont,
      cssSelectorPrefix: cssSelectorPrefix,
    );
  }

  /// Applies the standard styles to the scene host element.
  static void styleSceneHost(
    DomElement sceneHost, {
    bool debugShowSemanticsNodes = false,
  }) {
    assert(sceneHost.tagName.toLowerCase() == DomManager.sceneHostTagName.toLowerCase());
    // Don't allow the scene to receive pointer events.
    sceneHost.style.pointerEvents = 'none';
    // When debugging semantics, make the scene semi-transparent so that the
    // semantics tree is more prominent.
    if (debugShowSemanticsNodes) {
      sceneHost.style.opacity = '0.3';
    }
  }

  /// Applies the standard styles (absolute positioning and device-pixel
  /// scaling) to the semantics host element.
  static void styleSemanticsHost(
    DomElement semanticsHost,
    double devicePixelRatio,
  ) {
    assert(semanticsHost.tagName.toLowerCase() == DomManager.semanticsHostTagName.toLowerCase());
    semanticsHost.style
      ..position = 'absolute'
      ..transformOrigin = '0 0 0';
    scaleSemanticsHost(semanticsHost, devicePixelRatio);
  }

  /// The framework specifies semantics in physical pixels, but CSS uses
  /// logical pixels. To compensate, an inverse scale is injected at the root
  /// level.
  static void scaleSemanticsHost(
    DomElement semanticsHost,
    double devicePixelRatio,
  ) {
    assert(semanticsHost.tagName.toLowerCase() == DomManager.semanticsHostTagName.toLowerCase());
    semanticsHost.style.transform = 'scale(${1 / devicePixelRatio})';
  }
}
/// Applies the required global CSS to an incoming [DomCSSStyleSheet] `sheet`.
///
/// [styleElement] must already be attached to the DOM before this is called.
/// [cssSelectorPrefix] scopes every rule to a particular root element (it may
/// be empty). [defaultCssFont] is the CSS `font` shorthand applied to the
/// scene host.
@visibleForTesting
void applyGlobalCssRulesToSheet(
  DomHTMLStyleElement styleElement, {
  String cssSelectorPrefix = '',
  required String defaultCssFont,
}) {
  styleElement.appendText(
    // Fixes #115216 by ensuring that our parameters only affect the flt-scene-host children.
    '$cssSelectorPrefix ${DomManager.sceneHostTagName} {'
    '  font: $defaultCssFont;'
    '}'
    // This undoes browser's default painting and layout attributes of range
    // input, which is used in semantics.
    '$cssSelectorPrefix flt-semantics input[type=range] {'
    '  appearance: none;'
    '  -webkit-appearance: none;'
    '  width: 100%;'
    '  position: absolute;'
    '  border: none;'
    '  top: 0;'
    '  right: 0;'
    '  bottom: 0;'
    '  left: 0;'
    '}'
    // The invisible semantic text field may have a visible cursor and selection
    // highlight. The following 2 CSS rules force everything to be transparent.
    '$cssSelectorPrefix input::selection {'
    '  background-color: transparent;'
    '}'
    '$cssSelectorPrefix textarea::selection {'
    '  background-color: transparent;'
    '}'
    '$cssSelectorPrefix flt-semantics input,'
    '$cssSelectorPrefix flt-semantics textarea,'
    '$cssSelectorPrefix flt-semantics [contentEditable="true"] {'
    '  caret-color: transparent;'
    '}'
    // Hide placeholder text
    '$cssSelectorPrefix .flt-text-editing::placeholder {'
    '  opacity: 0;'
    '}'
    // Hide outline when the flutter-view root element is focused.
    '$cssSelectorPrefix:focus {'
    ' outline: none;'
    '}',
  );
  // By default on iOS, Safari would highlight the element that's being tapped
  // on using gray background. This CSS rule disables that.
  if (isSafari) {
    styleElement.appendText(
      '$cssSelectorPrefix * {'
      '  -webkit-tap-highlight-color: transparent;'
      '}'
      '$cssSelectorPrefix flt-semantics input[type=range]::-webkit-slider-thumb {'
      '  -webkit-appearance: none;'
      '}'
    );
  }
  if (isFirefox) {
    // For firefox set line-height, otherwise text at same font-size will
    // measure differently in ruler.
    //
    // - See: https://github.com/flutter/flutter/issues/44803
    styleElement.appendText(
      '$cssSelectorPrefix flt-paragraph,'
      '$cssSelectorPrefix flt-span {'
      '  line-height: 100%;'
      '}'
    );
  }
  // This CSS makes the autofill overlay transparent in order to prevent it
  // from overlaying on top of Flutter-rendered text inputs.
  // See: https://github.com/flutter/flutter/issues/118337.
  if (browserHasAutofillOverlay()) {
    styleElement.appendText(
      '$cssSelectorPrefix .transparentTextEditing:-webkit-autofill,'
      '$cssSelectorPrefix .transparentTextEditing:-webkit-autofill:hover,'
      '$cssSelectorPrefix .transparentTextEditing:-webkit-autofill:focus,'
      '$cssSelectorPrefix .transparentTextEditing:-webkit-autofill:active {'
      '  opacity: 0 !important;'
      '}'
    );
  }
  // Removes password reveal icon for text inputs in Edge browsers.
  // Non-Edge browsers will crash trying to parse -ms-reveal CSS selector,
  // so we guard it behind an isEdge check.
  // Fixes: https://github.com/flutter/flutter/issues/83695
  if (isEdge) {
    // We try-catch this, because in testing, we fake Edge via the UserAgent,
    // so the below will throw an exception (because only real Edge understands
    // the ::-ms-reveal pseudo-selector).
    try {
      styleElement.appendText(
        '$cssSelectorPrefix input::-ms-reveal {'
        '  display: none;'
        '}'
      );
    } on DomException catch (e) {
      // Browsers that don't understand ::-ms-reveal throw a DOMException
      // of type SyntaxError.
      domWindow.console.warn(e);
      // Add a fake rule if our code failed because we're under testing
      assert(() {
        styleElement.appendText(
          '$cssSelectorPrefix input.fallback-for-fakey-browser-in-ci {'
          '  display: none;'
          '}'
        );
        return true;
      }());
    }
  }
}
| engine/lib/web_ui/lib/src/engine/view_embedder/style_manager.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/view_embedder/style_manager.dart",
"repo_id": "engine",
"token_count": 2377
} | 298 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of ui;
/// A display (screen) attached to the device.
abstract class Display {
  /// An identifier for this display.
  int get id;
  /// The ratio of physical pixels to logical pixels for this display.
  double get devicePixelRatio;
  /// The size of this display, in physical pixels.
  Size get size;
  /// The refresh rate of this display.
  double get refreshRate;
}
/// A view into which Flutter content can be rendered.
abstract class FlutterView {
  /// The [PlatformDispatcher] this view is registered with.
  PlatformDispatcher get platformDispatcher;
  /// The identifier for this view.
  int get viewId;
  /// The ratio of physical pixels to logical pixels for this view.
  double get devicePixelRatio;
  /// The constraints, in physical pixels, that this view's size must satisfy.
  ViewConstraints get physicalConstraints;
  /// The current size of this view, in physical pixels.
  Size get physicalSize;
  /// The parts of the view obscured by system UI such as the keyboard.
  ViewPadding get viewInsets;
  ViewPadding get viewPadding;
  /// Areas where system gestures take priority over app gestures.
  ViewPadding get systemGestureInsets;
  ViewPadding get padding;
  /// Gesture configuration (e.g. touch slop) for this view.
  GestureSettings get gestureSettings;
  /// Physical display features (cutouts, folds) intersecting this view.
  List<DisplayFeature> get displayFeatures;
  /// The display on which this view appears.
  Display get display;
  /// Renders [scene] into this view, optionally at the given [size].
  void render(Scene scene, {Size? size});
  /// Forwards a semantics [update] to the platform dispatcher.
  void updateSemantics(SemanticsUpdate update) => platformDispatcher.updateSemantics(update);
}
/// A [FlutterView] that additionally exposes window-level APIs: locales,
/// lifecycle, frame callbacks, platform messages, and semantics.
///
/// This mirrors the `SingletonFlutterWindow` interface of `dart:ui`.
abstract class SingletonFlutterWindow extends FlutterView {
  // Metrics.
  VoidCallback? get onMetricsChanged;
  set onMetricsChanged(VoidCallback? callback);
  // Locale and platform configuration.
  Locale get locale;
  List<Locale> get locales;
  Locale? computePlatformResolvedLocale(List<Locale> supportedLocales);
  VoidCallback? get onLocaleChanged;
  set onLocaleChanged(VoidCallback? callback);
  String get initialLifecycleState;
  double get textScaleFactor;
  bool get nativeSpellCheckServiceDefined;
  bool get brieflyShowPassword;
  bool get alwaysUse24HourFormat;
  VoidCallback? get onTextScaleFactorChanged;
  set onTextScaleFactorChanged(VoidCallback? callback);
  Brightness get platformBrightness;
  VoidCallback? get onPlatformBrightnessChanged;
  set onPlatformBrightnessChanged(VoidCallback? callback);
  String? get systemFontFamily;
  VoidCallback? get onSystemFontFamilyChanged;
  set onSystemFontFamilyChanged(VoidCallback? callback);
  // Frame scheduling and input callbacks.
  FrameCallback? get onBeginFrame;
  set onBeginFrame(FrameCallback? callback);
  VoidCallback? get onDrawFrame;
  set onDrawFrame(VoidCallback? callback);
  TimingsCallback? get onReportTimings;
  set onReportTimings(TimingsCallback? callback);
  PointerDataPacketCallback? get onPointerDataPacket;
  set onPointerDataPacket(PointerDataPacketCallback? callback);
  KeyDataCallback? get onKeyData;
  set onKeyData(KeyDataCallback? callback);
  String get defaultRouteName;
  void scheduleFrame();
  // Semantics and accessibility.
  bool get semanticsEnabled;
  VoidCallback? get onSemanticsEnabledChanged;
  set onSemanticsEnabledChanged(VoidCallback? callback);
  FrameData get frameData;
  VoidCallback? get onFrameDataChanged;
  set onFrameDataChanged(VoidCallback? callback);
  AccessibilityFeatures get accessibilityFeatures;
  VoidCallback? get onAccessibilityFeaturesChanged;
  set onAccessibilityFeaturesChanged(VoidCallback? callback);
  // Platform messaging.
  void sendPlatformMessage(
    String name,
    ByteData? data,
    PlatformMessageResponseCallback? callback,
  );
  PlatformMessageCallback? get onPlatformMessage;
  set onPlatformMessage(PlatformMessageCallback? callback);
  void setIsolateDebugName(String name);
}
/// The set of platform accessibility features currently enabled.
abstract class AccessibilityFeatures {
  /// Whether accessible navigation (e.g. a screen reader) is active.
  bool get accessibleNavigation;
  /// Whether the platform requests inverted colors.
  bool get invertColors;
  /// Whether the platform requests animations to be disabled or reduced.
  bool get disableAnimations;
  /// Whether the platform requests bold text.
  bool get boldText;
  /// Whether the platform requests reduced motion.
  bool get reduceMotion;
  /// Whether the platform requests high-contrast rendering.
  bool get highContrast;
  /// Whether the platform requests on/off labels on switches.
  bool get onOffSwitchLabels;
}
/// Describes the contrast of a theme or color palette.
enum Brightness {
  /// A dark color scheme.
  dark,
  /// A light color scheme.
  light,
}
// Unimplemented classes.
// TODO(dit): see https://github.com/flutter/flutter/issues/33614.
/// An opaque handle wrapping a raw callback identifier.
///
/// NOTE: equality is identity-based, so two handles created from the same raw
/// value are NOT equal to each other.
class CallbackHandle {
  CallbackHandle.fromRawHandle(this._handle);
  // The raw integer this handle was created from.
  final int _handle;
  /// Returns the raw integer backing this handle.
  int toRawHandle() => _handle;
  @override
  bool operator ==(Object other) => identical(this, other);
  @override
  // ignore: unnecessary_overrides
  int get hashCode => super.hashCode;
}
// TODO(dit): see https://github.com/flutter/flutter/issues/33615.
/// Unimplemented on the web; both methods throw [UnimplementedError].
abstract final class PluginUtilities {
  static CallbackHandle? getCallbackHandle(Function callback) {
    throw UnimplementedError();
  }
  static Function? getCallbackFromHandle(CallbackHandle handle) {
    throw UnimplementedError();
  }
}
/// Unimplemented on the web (there are no isolates to register ports for);
/// all methods throw [UnimplementedError].
abstract final class IsolateNameServer {
  static dynamic lookupPortByName(String name) {
    throw UnimplementedError();
  }
  static bool registerPortWithName(dynamic port, String name) {
    throw UnimplementedError();
  }
  static bool removePortNameMapping(String name) {
    throw UnimplementedError();
  }
}
/// The web engine's singleton window, backed by the engine implementation.
SingletonFlutterWindow get window => engine.window;
/// Frame information; on the web the frame number is not tracked, so
/// [frameNumber] always reports -1.
class FrameData {
  const FrameData.webOnly();
  /// Always -1 on the web.
  int get frameNumber => -1;
}
/// Gesture configuration values for a view.
///
/// Distances are expressed in physical pixels. A `null` field means the value
/// was not supplied and the framework default should be used instead.
class GestureSettings {
  const GestureSettings({
    this.physicalTouchSlop,
    this.physicalDoubleTapSlop,
  });

  /// The touch slop distance, in physical pixels, or `null` if unset.
  final double? physicalTouchSlop;

  /// The double-tap slop distance, in physical pixels, or `null` if unset.
  final double? physicalDoubleTapSlop;

  /// Returns a copy of this object with only the provided fields replaced.
  GestureSettings copyWith({
    double? physicalTouchSlop,
    double? physicalDoubleTapSlop,
  }) =>
      GestureSettings(
        physicalTouchSlop: physicalTouchSlop ?? this.physicalTouchSlop,
        physicalDoubleTapSlop:
            physicalDoubleTapSlop ?? this.physicalDoubleTapSlop,
      );

  @override
  bool operator ==(Object other) =>
      other.runtimeType == runtimeType &&
      other is GestureSettings &&
      other.physicalTouchSlop == physicalTouchSlop &&
      other.physicalDoubleTapSlop == physicalDoubleTapSlop;

  @override
  int get hashCode => Object.hash(physicalTouchSlop, physicalDoubleTapSlop);

  @override
  String toString() => 'GestureSettings(physicalTouchSlop: $physicalTouchSlop, physicalDoubleTapSlop: $physicalDoubleTapSlop)';
}
| engine/lib/web_ui/lib/window.dart/0 | {
"file_path": "engine/lib/web_ui/lib/window.dart",
"repo_id": "engine",
"token_count": 1591
} | 299 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Declarations of JavaScript-implemented helpers imported by the Skwasm
// renderer. The implementations live on the JS side; the semantics below are
// inferred from the names and signatures — confirm against the JS bindings.
#ifndef FLUTTER_LIB_WEB_UI_SKWASM_SKWASM_SUPPORT_H_
#define FLUTTER_LIB_WEB_UI_SKWASM_SKWASM_SUPPORT_H_
#include <emscripten/threading.h>
#include <cinttypes>
#include "third_party/skia/include/core/SkPicture.h"
namespace Skwasm {
class Surface;
}
// JS object reference type (WebAssembly externref).
using SkwasmObject = __externref_t;
extern "C" {
// Associates a JS object with a native pointer on the given thread.
extern void skwasm_setAssociatedObjectOnThread(unsigned long threadId,
                                               void* pointer,
                                               SkwasmObject object);
// Retrieves the JS object previously associated with `pointer`.
extern SkwasmObject skwasm_getAssociatedObject(void* pointer);
// Releases the association created by skwasm_setAssociatedObjectOnThread.
extern void skwasm_disposeAssociatedObjectOnThread(unsigned long threadId,
                                                   void* pointer);
extern void skwasm_registerMessageListener(pthread_t threadId);
extern void skwasm_syncTimeOriginForThread(pthread_t threadId);
// Asks the given thread to rasterize `count` pictures for `surface`;
// completion is reported via `callbackId`.
extern void skwasm_dispatchRenderPictures(unsigned long threadId,
                                          Skwasm::Surface* surface,
                                          sk_sp<SkPicture>* pictures,
                                          int count,
                                          uint32_t callbackId);
// Returns a handle to a newly created OffscreenCanvas of the given size.
extern uint32_t skwasm_createOffscreenCanvas(int width, int height);
extern void skwasm_resizeCanvas(uint32_t contextHandle, int width, int height);
extern SkwasmObject skwasm_captureImageBitmap(uint32_t contextHandle,
                                              int width,
                                              int height,
                                              SkwasmObject imagePromises);
extern void skwasm_resolveAndPostImages(Skwasm::Surface* surface,
                                        SkwasmObject imagePromises,
                                        double rasterStart,
                                        uint32_t callbackId);
// Creates a GL texture from a JS texture source (e.g. video/image element).
extern unsigned int skwasm_createGlTextureFromTextureSource(
    SkwasmObject textureSource,
    int width,
    int height);
}
#endif  // FLUTTER_LIB_WEB_UI_SKWASM_SKWASM_SUPPORT_H_
"file_path": "engine/lib/web_ui/skwasm/skwasm_support.h",
"repo_id": "engine",
"token_count": 1117
} | 300 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import '../common/matchers.dart';
import 'canvaskit_api_test.dart';
// Whether the current browser is Chromium-based (the only engine where
// Trusted Types are enforced by these tests).
final bool isBlink = browserEngine == BrowserEngine.blink;
// A URL that matches the expected canvaskit.js pattern.
const String goodUrl = 'https://www.unpkg.com/blah-blah/33.x/canvaskit.js';
// A URL that does not point at a canvaskit.js file (the misspelling in the
// path is irrelevant; only the file name matters).
const String badUrl = 'https://www.unpkg.com/soemthing/not-canvaskit.js';
// These tests need to happen in a separate file, because a Content Security
// Policy cannot be relaxed once set, only made more strict.
void main() {
  internalBootstrapBrowserTest(() => testMainWithTTOn);
}
// Enables Trusted Types, runs all `canvaskit_api_test.dart`, then tests the
// createTrustedScriptUrl function.
void testMainWithTTOn() {
  enableTrustedTypes();
  // Run all standard canvaskit tests, with TT on...
  testMain();
  // These assertions only apply on Blink, where Trusted Types are enforced.
  group('TrustedTypes API supported', () {
    test('createTrustedScriptUrl - returns TrustedScriptURL object', () async {
      final Object trusted = createTrustedScriptUrl(goodUrl);
      expect(trusted, isA<DomTrustedScriptURL>());
      expect((trusted as DomTrustedScriptURL).url, goodUrl);
    });
    test('createTrustedScriptUrl - rejects bad canvaskit.js URL', () async {
      expect(() {
        createTrustedScriptUrl(badUrl);
      }, throwsAssertionError);
    });
  }, skip: !isBlink);
  // On non-Blink engines the URL is passed through unmodified.
  group('Trusted Types API NOT supported', () {
    test('createTrustedScriptUrl - returns unmodified url', () async {
      expect(createTrustedScriptUrl(badUrl), badUrl);
    });
  }, skip: isBlink);
}
/// Enables Trusted Types by setting the appropriate meta tag in the DOM:
/// <meta http-equiv="Content-Security-Policy" content="require-trusted-types-for 'script'">
void enableTrustedTypes() {
  print('Enabling TrustedTypes in browser window...');
  final DomHTMLMetaElement meta = createDomHTMLMetaElement();
  meta.setAttribute('http-equiv', 'Content-Security-Policy');
  meta.content = "require-trusted-types-for 'script'";
  domDocument.head!.append(meta);
}
| engine/lib/web_ui/test/canvaskit/canvaskit_api_tt_on_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/canvaskit_api_tt_on_test.dart",
"repo_id": "engine",
"token_count": 707
} | 301 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'common.dart';
/// Bootstraps the browser test harness and delegates to [testMain].
void main() {
  internalBootstrapBrowserTest(() => testMain);
}
const ui.Rect region = ui.Rect.fromLTRB(0, 0, 500, 250);

/// Gradient colors shared by all tests in this file: a full cycle through
/// four colors, returning to the starting color.
const List<ui.Color> _gradientColors = <ui.Color>[
  ui.Color(0xFF4285F4),
  ui.Color(0xFF34A853),
  ui.Color(0xFFFBBC05),
  ui.Color(0xFFEA4335),
  ui.Color(0xFF4285F4),
];

/// Evenly spaced stops matching [_gradientColors].
const List<double> _gradientStops = <double>[0.0, 0.25, 0.5, 0.75, 1.0];

/// Fills [region] with a horizontal linear gradient, optionally transformed
/// by [transform], and compares the result against the golden [goldenFile].
Future<void> _checkLinearGradientGolden(
  String goldenFile,
  Matrix4? transform,
) async {
  final CkPictureRecorder recorder = CkPictureRecorder();
  final CkCanvas canvas = recorder.beginRecording(region);
  final CkGradientLinear gradient = CkGradientLinear(
      // Gradient runs from 1/4 into the region to 1/8 before its right edge,
      // at half height.
      ui.Offset(region.left + region.width / 4, region.height / 2),
      ui.Offset(region.right - region.width / 8, region.height / 2),
      _gradientColors,
      _gradientStops,
      ui.TileMode.clamp,
      transform?.storage);
  final CkPaint paint = CkPaint()..shader = gradient;
  canvas.drawRect(region, paint);
  await matchPictureGolden(
    goldenFile,
    recorder.endRecording(),
    region: region,
  );
}

void testMain() {
  group('Linear', () {
    setUpCanvasKitTest(withImplicitView: true);

    test('is correctly rendered', () async {
      await _checkLinearGradientGolden('canvaskit_linear_gradient.png', null);
    });

    test('is correctly rendered when rotated', () async {
      await _checkLinearGradientGolden(
        'canvaskit_linear_gradient_rotated.png',
        Matrix4.rotationZ(math.pi / 6.0),
      );
    });

    // TODO(hterkelsen): https://github.com/flutter/flutter/issues/71520
  }, skip: isSafari || isFirefox);
}
| engine/lib/web_ui/test/canvaskit/linear_gradient_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/linear_gradient_golden_test.dart",
"repo_id": "engine",
"token_count": 1332
} | 302 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'common.dart';
/// Bootstraps the browser test harness and delegates to [testMain].
void main() {
  internalBootstrapBrowserTest(() => testMain);
}
const ui.Rect region = ui.Rect.fromLTRB(0, 0, 500, 250);

void testMain() {
  group('SweepGradient', () {
    setUpCanvasKitTest(withImplicitView: true);

    test('is correctly rendered', () async {
      // A full sweep centered in the region, starting at 12 o'clock (-pi/2)
      // and cycling through four colors back to the starting color.
      const List<ui.Color> colors = <ui.Color>[
        ui.Color(0xFF4285F4),
        ui.Color(0xFF34A853),
        ui.Color(0xFFFBBC05),
        ui.Color(0xFFEA4335),
        ui.Color(0xFF4285F4),
      ];
      const List<double> colorStops = <double>[0.0, 0.25, 0.5, 0.75, 1.0];
      final CkGradientSweep shader = CkGradientSweep(
          const ui.Offset(250, 125),
          colors,
          colorStops,
          ui.TileMode.clamp,
          -(math.pi / 2),
          math.pi * 2 - (math.pi / 2),
          null);
      final CkPictureRecorder pictureRecorder = CkPictureRecorder();
      final CkCanvas ckCanvas = pictureRecorder.beginRecording(region);
      ckCanvas.drawRect(region, CkPaint()..shader = shader);
      await matchPictureGolden(
        'canvaskit_sweep_gradient.png',
        pictureRecorder.endRecording(),
        region: region,
      );
    });

    // TODO(hterkelsen): https://github.com/flutter/flutter/issues/71520
  }, skip: isSafari || isFirefox);
}
| engine/lib/web_ui/test/canvaskit/sweep_gradient_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/sweep_gradient_golden_test.dart",
"repo_id": "engine",
"token_count": 782
} | 303 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This is identical to
// ../../../testing/dart/channel_buffers_test.dart except for:
//
// * The imports are a bit different.
// * The main method has been renamed testMain.
// * A new main method here bootstraps the web tests.
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine/browser_detection.dart';
import 'package:ui/ui.dart' as ui;
/// Bootstraps the browser test harness and delegates to [testMain].
void main() {
  internalBootstrapBrowserTest(() => testMain);
}
/// Encodes [str] as UTF-8 and wraps the encoded bytes in a [ByteData].
///
/// Uses [ByteData.sublistView] instead of `ByteData.view(list.buffer)`: the
/// latter exposes the *entire* underlying buffer, which would be wrong if the
/// encoder ever returns a [Uint8List] that is a view with a nonzero offset or
/// excess capacity. `sublistView` respects the list's offset and length.
ByteData _makeByteData(String str) {
  final Uint8List list = utf8.encode(str);
  return ByteData.sublistView(list);
}
/// Sends a `resize` control message to [buffers], setting the capacity of the
/// channel [name] to [newSize]. Uses the legacy `\r`-separated string format.
void _resize(ui.ChannelBuffers buffers, String name, int newSize) {
  buffers.handleMessage(_makeByteData('resize\r$name\r$newSize'));
}
/// Exercises [ui.ChannelBuffers]: push/drain semantics, capacity (resize and
/// overflow) behavior, listener registration, control-message parsing, and
/// zone correctness. Mirrors `testing/dart/channel_buffers_test.dart`.
void testMain() {
  test('push drain', () async {
    const String channel = 'foo';
    final ByteData data = _makeByteData('bar');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    bool called = false;
    void callback(ByteData? responseData) {
      called = true;
    }
    buffers.push(channel, data, callback);
    await buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) {
      expect(drainedData, equals(data));
      assert(!called);
      drainedCallback(drainedData);
      assert(called);
      return Future<void>.value();
    });
  });
  test('drain is sync', () async {
    const String channel = 'foo';
    final ByteData data = _makeByteData('message');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    void callback(ByteData? responseData) {}
    buffers.push(channel, data, callback);
    final List<String> log = <String>[];
    final Completer<void> completer = Completer<void>();
    scheduleMicrotask(() { log.add('before drain, microtask'); });
    log.add('before drain');
    // Ignoring the returned future because the completion of the drain is
    // communicated using the `completer`.
    // ignore: unawaited_futures
    buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) async {
      log.add('callback');
      completer.complete();
    });
    log.add('after drain, before await');
    await completer.future;
    log.add('after await');
    // The callback entry appearing before 'after drain' proves the drain
    // delivered the buffered message synchronously.
    expect(log, <String>[
      'before drain',
      'callback',
      'after drain, before await',
      'before drain, microtask',
      'after await'
    ]);
  });
  test('push drain zero', () async {
    const String channel = 'foo';
    final ByteData data = _makeByteData('bar');
    final
    ui.ChannelBuffers buffers = ui.ChannelBuffers();
    void callback(ByteData? responseData) {}
    // A zero-capacity channel drops every pushed message.
    _resize(buffers, channel, 0);
    buffers.push(channel, data, callback);
    bool didCall = false;
    await buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) {
      didCall = true;
      return Future<void>.value();
    });
    expect(didCall, isFalse);
  });
  test('drain when empty', () async {
    const String channel = 'foo';
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    bool didCall = false;
    await buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) {
      didCall = true;
      return Future<void>.value();
    });
    expect(didCall, isFalse);
  });
  test('overflow', () async {
    const String channel = 'foo';
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ByteData three = _makeByteData('three');
    final ByteData four = _makeByteData('four');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    void callback(ByteData? responseData) {}
    // With capacity 3, pushing a fourth message evicts the oldest ('one').
    _resize(buffers, channel, 3);
    buffers.push(channel, one, callback);
    buffers.push(channel, two, callback);
    buffers.push(channel, three, callback);
    buffers.push(channel, four, callback);
    int counter = 0;
    await buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) {
      switch (counter) {
        case 0:
          expect(drainedData, equals(two));
        case 1:
          expect(drainedData, equals(three));
        case 2:
          expect(drainedData, equals(four));
      }
      counter += 1;
      return Future<void>.value();
    });
    expect(counter, equals(3));
  });
  test('resize drop', () async {
    const String channel = 'foo';
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    _resize(buffers, channel, 100);
    void callback(ByteData? responseData) {}
    buffers.push(channel, one, callback);
    buffers.push(channel, two, callback);
    // Shrinking below the number of buffered messages drops the oldest ones.
    _resize(buffers, channel, 1);
    int counter = 0;
    await buffers.drain(channel, (ByteData? drainedData, ui.PlatformMessageResponseCallback drainedCallback) {
      switch (counter) {
        case 0:
          expect(drainedData, equals(two));
      }
      counter += 1;
      return Future<void>.value();
    });
    expect(counter, equals(1));
  });
  test('resize dropping calls callback', () async {
    const String channel = 'foo';
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    bool didCallCallback = false;
    // A dropped message's callback is invoked with null (no response).
    void oneCallback(ByteData? responseData) {
      expect(responseData, isNull);
      didCallCallback = true;
    }
    void twoCallback(ByteData? responseData) {
      throw TestFailure('wrong callback called'); // ignore: only_throw_errors
    }
    _resize(buffers, channel, 100);
    buffers.push(channel, one, oneCallback);
    buffers.push(channel, two, twoCallback);
    expect(didCallCallback, isFalse);
    _resize(buffers, channel, 1);
    expect(didCallCallback, isTrue);
  });
  test('overflow calls callback', () async {
    const String channel = 'foo';
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    bool didCallCallback = false;
    void oneCallback(ByteData? responseData) {
      expect(responseData, isNull);
      didCallCallback = true;
    }
    void twoCallback(ByteData? responseData) {
      throw TestFailure('wrong callback called'); // ignore: only_throw_errors
    }
    _resize(buffers, channel, 1);
    buffers.push(channel, one, oneCallback);
    buffers.push(channel, two, twoCallback);
    expect(didCallCallback, isTrue);
  });
  test('handle garbage', () async {
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    expect(() => buffers.handleMessage(_makeByteData('asdfasdf')),
           throwsException);
  });
  test('handle resize garbage', () async {
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    expect(() => buffers.handleMessage(_makeByteData('resize\rfoo\rbar')),
           throwsException);
  });
  test('ChannelBuffers.setListener', () async {
    final List<String> log = <String>[];
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ByteData three = _makeByteData('three');
    final ByteData four = _makeByteData('four');
    final ByteData five = _makeByteData('five');
    final ByteData six = _makeByteData('six');
    final ByteData seven = _makeByteData('seven');
    buffers.push('a', one, (ByteData? data) { });
    buffers.push('b', two, (ByteData? data) { });
    buffers.push('a', three, (ByteData? data) { });
    log.add('top');
    buffers.setListener('a', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
      log.add('a1: ${utf8.decode(data!.buffer.asUint8List())}');
    });
    log.add('-1');
    await null;
    log.add('-2');
    buffers.setListener('a', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
      log.add('a2: ${utf8.decode(data!.buffer.asUint8List())}');
    });
    log.add('-3');
    await null;
    log.add('-4');
    buffers.setListener('b', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
      log.add('b: ${utf8.decode(data!.buffer.asUint8List())}');
    });
    log.add('-5');
    await null; // first microtask after setting listener drains the first message
    await null; // second microtask ends the draining.
    log.add('-6');
    buffers.push('b', four, (ByteData? data) { });
    buffers.push('a', five, (ByteData? data) { });
    log.add('-7');
    await null;
    log.add('-8');
    buffers.clearListener('a');
    buffers.push('a', six, (ByteData? data) { });
    buffers.push('b', seven, (ByteData? data) { });
    await null;
    log.add('-9');
    expect(log, <String>[
      'top',
      '-1',
      'a1: three',
      '-2',
      '-3',
      '-4',
      '-5',
      'b: two',
      '-6',
      'b: four',
      'a2: five',
      '-7',
      '-8',
      'b: seven',
      '-9',
    ]);
  });
  test('ChannelBuffers.clearListener', () async {
    final List<String> log = <String>[];
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    final ByteData one = _makeByteData('one');
    final ByteData two = _makeByteData('two');
    final ByteData three = _makeByteData('three');
    final ByteData four = _makeByteData('four');
    buffers.handleMessage(_makeByteData('resize\ra\r10'));
    buffers.push('a', one, (ByteData? data) { });
    buffers.push('a', two, (ByteData? data) { });
    buffers.push('a', three, (ByteData? data) { });
    log.add('-1');
    buffers.setListener('a', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
      log.add('a1: ${utf8.decode(data!.buffer.asUint8List())}');
    });
    await null; // handles one
    log.add('-2');
    buffers.clearListener('a');
    await null;
    log.add('-3');
    buffers.setListener('a', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
      log.add('a2: ${utf8.decode(data!.buffer.asUint8List())}');
    });
    log.add('-4');
    await null;
    buffers.push('a', four, (ByteData? data) { });
    log.add('-5');
    await null;
    log.add('-6');
    await null;
    log.add('-7');
    await null;
    expect(log, <String>[
      '-1',
      'a1: one',
      '-2',
      '-3',
      '-4',
      'a2: two',
      '-5',
      'a2: three',
      '-6',
      'a2: four',
      '-7',
    ]);
  }, skip: isWasm); // https://github.com/dart-lang/sdk/issues/50778
  test('ChannelBuffers.handleMessage for resize', () async {
    final List<String> log = <String>[];
    final ui.ChannelBuffers buffers = _TestChannelBuffers(log);
    // Created as follows:
    //   print(StandardMethodCodec().encodeMethodCall(MethodCall('resize', ['abcdef', 12345])).buffer.asUint8List());
    // ...with three 0xFF bytes on either side to ensure the method works with an offset on the underlying buffer.
    buffers.handleMessage(ByteData.sublistView(Uint8List.fromList(<int>[255, 255, 255, 7, 6, 114, 101, 115, 105, 122, 101, 12, 2, 7, 6, 97, 98, 99, 100, 101, 102, 3, 57, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255]), 3, 27));
    expect(log, const <String>['resize abcdef 12345']);
  });
  test('ChannelBuffers.handleMessage for overflow', () async {
    final List<String> log = <String>[];
    final ui.ChannelBuffers buffers = _TestChannelBuffers(log);
    // Created as follows:
    //   print(StandardMethodCodec().encodeMethodCall(MethodCall('overflow', ['abcdef', false])).buffer.asUint8List());
    // ...with three 0xFF bytes on either side to ensure the method works with an offset on the underlying buffer.
    buffers.handleMessage(ByteData.sublistView(Uint8List.fromList(<int>[255, 255, 255, 7, 8, 111, 118, 101, 114, 102, 108, 111, 119, 12, 2, 7, 6, 97, 98, 99, 100, 101, 102, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255]), 3, 24));
    expect(log, const <String>['allowOverflow abcdef false']);
  });
  // Listeners and response callbacks must run in the zones in which they were
  // registered.
  test('ChannelBuffers uses the right zones', () async {
    final List<String> log = <String>[];
    final ui.ChannelBuffers buffers = ui.ChannelBuffers();
    final Zone zone1 = Zone.current.fork();
    final Zone zone2 = Zone.current.fork();
    zone1.run(() {
      log.add('first zone run: ${Zone.current == zone1}');
      buffers.setListener('a', (ByteData? data, ui.PlatformMessageResponseCallback callback) {
        log.add('callback1: ${Zone.current == zone1}');
        callback(data);
      });
    });
    zone2.run(() {
      log.add('second zone run: ${Zone.current == zone2}');
      buffers.push('a', ByteData.sublistView(Uint8List.fromList(<int>[]), 0, 0), (ByteData? data) {
        log.add('callback2: ${Zone.current == zone2}');
      });
    });
    await null;
    expect(log, <String>[
      'first zone run: true',
      'second zone run: true',
      'callback1: true',
      'callback2: true',
    ]);
  });
}
/// A [ui.ChannelBuffers] that records calls to [resize] and [allowOverflow]
/// in [log] instead of performing them, so tests can verify control-message
/// parsing.
class _TestChannelBuffers extends ui.ChannelBuffers {
  _TestChannelBuffers(this.log);
  // One entry per intercepted control message.
  final List<String> log;
  @override
  void resize(String name, int newSize) {
    log.add('resize $name $newSize');
  }
  @override
  void allowOverflow(String name, bool allowed) {
    log.add('allowOverflow $name $allowed');
  }
}
| engine/lib/web_ui/test/engine/channel_buffers_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/channel_buffers_test.dart",
"repo_id": "engine",
"token_count": 5040
} | 304 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_interop';
import 'package:js/js_util.dart' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart' as engine;
import 'package:ui/ui_web/src/ui_web.dart' as ui_web;
/// Binds to the page-global `_flutter` JS object.
@JS('_flutter')
external set _loader(JSAny? loader);
/// Dart-friendly setter: converts [l] to a JS value before assigning the
/// global `_flutter` object.
set loader(Object? l) => _loader = l?.toJSAnyShallow;
/// Overrides `_flutter.loader.didCreateEngineInitializer` with [callback].
@JS('_flutter.loader.didCreateEngineInitializer')
external set didCreateEngineInitializer(JSFunction? callback);
/// Installs a placeholder `_flutter.loader` object on the page, then
/// bootstraps the browser test harness and delegates to [testMain].
void main() {
  // Prepare _flutter.loader.didCreateEngineInitializer, so it's ready in the page ASAP.
  loader = js_util.jsify(<String, Object>{
    'loader': <String, Object>{
      'didCreateEngineInitializer': () { print('not mocked'); }.toJS,
    },
  });
  internalBootstrapBrowserTest(() => testMain);
}
/// Verifies engine bootstrapping behavior with and without a
/// `_flutter.loader.didCreateEngineInitializer` hook on the page.
void testMain() {
  test('bootstrapEngine calls _flutter.loader.didCreateEngineInitializer callback', () async {
    JSAny? engineInitializer;
    void didCreateEngineInitializerMock(JSAny? obj) {
      print('obj: $obj');
      engineInitializer = obj;
    }
    // Prepare the DOM for: _flutter.loader.didCreateEngineInitializer
    didCreateEngineInitializer = didCreateEngineInitializerMock.toJS;
    // Reset the engine
    engine.debugResetEngineInitializationState();
    await ui_web.bootstrapEngine(
      registerPlugins: () {},
      runApp: () {},
    );
    // Check that the object we captured is actually a loader
    expect(engineInitializer, isNotNull);
    expect(js_util.hasProperty(engineInitializer!, 'initializeEngine'), isTrue, reason: 'Missing FlutterEngineInitializer method: initializeEngine.');
    expect(js_util.hasProperty(engineInitializer!, 'autoStart'), isTrue, reason: 'Missing FlutterEngineInitializer method: autoStart.');
  });
  test('bootstrapEngine does auto-start when _flutter.loader.didCreateEngineInitializer does not exist', () async {
    // Remove the page-level hook so the engine falls back to auto-start.
    loader = null;
    bool pluginsRegistered = false;
    bool appRan = false;
    void registerPluginsMock() {
      pluginsRegistered = true;
    }
    void runAppMock() {
      appRan = true;
    }
    // Reset the engine
    engine.debugResetEngineInitializationState();
    await ui_web.bootstrapEngine(
      registerPlugins: registerPluginsMock,
      runApp: runAppMock,
    );
    // Check that the object we captured is actually a loader
    expect(pluginsRegistered, isTrue, reason: 'Plugins should be immediately registered in autoStart mode.');
    expect(appRan, isTrue, reason: 'App should run immediately in autoStart mode');
  });
  // We cannot test anymore, because by now the engine has registered some stuff that can't be rewound back.
  // Like the `ext.flutter.disassemble` developer extension.
}
| engine/lib/web_ui/test/engine/initialization_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/initialization_test.dart",
"repo_id": "engine",
"token_count": 936
} | 305 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_util' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'keyboard_converter_test.dart';
/// Sentinel `button` value meaning "no button changed" in a synthesized
/// pointer event.
const int _kNoButtonChange = -1;

/// Flattens the [ui.PointerData] entries of every packet in [packets] into a
/// single list, preserving packet order.
List<ui.PointerData> _allPointerData(List<ui.PointerDataPacket> packets) {
  return <ui.PointerData>[
    for (final ui.PointerDataPacket packet in packets) ...packet.data,
  ];
}
/// Test entry point: hands [testMain] to the browser test bootstrapper.
void main() {
  internalBootstrapBrowserTest(() {
    return testMain;
  });
}
/// The Flutter view under test; assigned a fresh instance in `setUp`.
late EngineFlutterView view;

/// The root DOM element of [view] — the target for synthesized DOM events.
DomElement get rootElement {
  return view.dom.rootElement;
}
void testMain() {
final DomElement hostElement = createDomHTMLDivElement();
setUpAll(() {
domDocument.body!.append(hostElement);
// Remove <body> margins to avoid messing up with all the test coordinates.
domDocument.body!.style.margin = '0';
});
tearDownAll(() {
hostElement.remove();
});
late List<ui.KeyData> keyDataList;
late KeyboardConverter keyboardConverter;
late PointerBinding instance;
late double dpi;
KeyboardConverter createKeyboardConverter(List<ui.KeyData> keyDataList) {
return KeyboardConverter((ui.KeyData key) {
keyDataList.add(key);
return true;
}, OperatingSystem.linux);
}
setUp(() {
keyDataList = <ui.KeyData>[];
keyboardConverter = createKeyboardConverter(keyDataList);
view = EngineFlutterView(EnginePlatformDispatcher.instance, hostElement);
instance = view.pointerBinding;
instance.debugOverrideKeyboardConverter(keyboardConverter);
ui.PlatformDispatcher.instance.onPointerDataPacket = null;
dpi = EngineFlutterDisplay.instance.devicePixelRatio;
});
tearDown(() {
keyboardConverter.dispose();
view.dispose();
PointerBinding.debugResetGlobalState();
});
test('ios workaround', () {
debugEmulateIosSafari = true;
addTearDown(() {
debugEmulateIosSafari = false;
});
final MockSafariPointerEventWorkaround mockSafariWorkaround =
MockSafariPointerEventWorkaround();
final PointerBinding instance = PointerBinding(
view,
safariWorkaround: mockSafariWorkaround,
);
expect(mockSafariWorkaround.workAroundInvoked, isIosSafari);
instance.dispose();
}, skip: !isSafari);
test('_PointerEventContext generates expected events', () {
DomPointerEvent expectCorrectType(DomEvent e) {
expect(domInstanceOfString(e, 'PointerEvent'), isTrue);
return e as DomPointerEvent;
}
List<DomPointerEvent> expectCorrectTypes(List<DomEvent> events) {
return events.map(expectCorrectType).toList();
}
final _PointerEventContext context = _PointerEventContext();
DomPointerEvent event;
List<DomPointerEvent> events;
event = expectCorrectType(context.primaryDown(clientX: 100, clientY: 101));
expect(event.type, equals('pointerdown'));
expect(event.pointerId, equals(1));
expect(event.button, equals(0));
expect(event.buttons, equals(1));
expect(event.client.x, equals(100));
expect(event.client.y, equals(101));
event = expectCorrectType(
context.mouseDown(clientX: 110, clientY: 111, button: 2, buttons: 2));
expect(event.type, equals('pointerdown'));
expect(event.pointerId, equals(1));
expect(event.button, equals(2));
expect(event.buttons, equals(2));
expect(event.client.x, equals(110));
expect(event.client.y, equals(111));
events = expectCorrectTypes(context.multiTouchDown(const <_TouchDetails>[
_TouchDetails(pointer: 100, clientX: 120, clientY: 121),
_TouchDetails(pointer: 101, clientX: 122, clientY: 123),
]));
expect(events.length, equals(2));
expect(events[0].type, equals('pointerdown'));
expect(events[0].pointerId, equals(100));
expect(events[0].button, equals(0));
expect(events[0].buttons, equals(1));
expect(events[0].client.x, equals(120));
expect(events[0].client.y, equals(121));
expect(events[1].type, equals('pointerdown'));
expect(events[1].pointerId, equals(101));
expect(events[1].button, equals(0));
expect(events[1].buttons, equals(1));
expect(events[1].client.x, equals(122));
expect(events[1].client.y, equals(123));
event = expectCorrectType(context.primaryMove(clientX: 200, clientY: 201));
expect(event.type, equals('pointermove'));
expect(event.pointerId, equals(1));
expect(event.button, equals(-1));
expect(event.buttons, equals(1));
expect(event.client.x, equals(200));
expect(event.client.y, equals(201));
event = expectCorrectType(context.mouseMove(
clientX: 210, clientY: 211, button: _kNoButtonChange, buttons: 6));
expect(event.type, equals('pointermove'));
expect(event.pointerId, equals(1));
expect(event.button, equals(-1));
expect(event.buttons, equals(6));
expect(event.client.x, equals(210));
expect(event.client.y, equals(211));
event = expectCorrectType(
context.mouseMove(clientX: 212, clientY: 213, button: 2, buttons: 6));
expect(event.type, equals('pointermove'));
expect(event.pointerId, equals(1));
expect(event.button, equals(2));
expect(event.buttons, equals(6));
expect(event.client.x, equals(212));
expect(event.client.y, equals(213));
event = expectCorrectType(
context.mouseMove(clientX: 214, clientY: 215, button: 2, buttons: 1));
expect(event.type, equals('pointermove'));
expect(event.pointerId, equals(1));
expect(event.button, equals(2));
expect(event.buttons, equals(1));
expect(event.client.x, equals(214));
expect(event.client.y, equals(215));
events = expectCorrectTypes(context.multiTouchMove(const <_TouchDetails>[
_TouchDetails(pointer: 102, clientX: 220, clientY: 221),
_TouchDetails(pointer: 103, clientX: 222, clientY: 223),
]));
expect(events.length, equals(2));
expect(events[0].type, equals('pointermove'));
expect(events[0].pointerId, equals(102));
expect(events[0].button, equals(-1));
expect(events[0].buttons, equals(1));
expect(events[0].client.x, equals(220));
expect(events[0].client.y, equals(221));
expect(events[1].type, equals('pointermove'));
expect(events[1].pointerId, equals(103));
expect(events[1].button, equals(-1));
expect(events[1].buttons, equals(1));
expect(events[1].client.x, equals(222));
expect(events[1].client.y, equals(223));
event = expectCorrectType(context.mouseLeave(clientX: 1000, clientY: 2000, buttons: 6));
expect(event.type, equals('pointerleave'));
expect(event.pointerId, equals(1));
expect(event.button, equals(0));
expect(event.buttons, equals(6));
expect(event.client.x, equals(1000));
expect(event.client.y, equals(2000));
event = expectCorrectType(context.primaryUp(clientX: 300, clientY: 301));
expect(event.type, equals('pointerup'));
expect(event.pointerId, equals(1));
expect(event.button, equals(0));
expect(event.buttons, equals(0));
expect(event.client.x, equals(300));
expect(event.client.y, equals(301));
event = expectCorrectType(
context.mouseUp(clientX: 310, clientY: 311, button: 2));
expect(event.type, equals('pointerup'));
expect(event.pointerId, equals(1));
expect(event.button, equals(2));
expect(event.buttons, equals(0));
expect(event.client.x, equals(310));
expect(event.client.y, equals(311));
events = expectCorrectTypes(context.multiTouchUp(const <_TouchDetails>[
_TouchDetails(pointer: 104, clientX: 320, clientY: 321),
_TouchDetails(pointer: 105, clientX: 322, clientY: 323),
]));
expect(events.length, equals(2));
expect(events[0].type, equals('pointerup'));
expect(events[0].pointerId, equals(104));
expect(events[0].button, equals(0));
expect(events[0].buttons, equals(0));
expect(events[0].client.x, equals(320));
expect(events[0].client.y, equals(321));
expect(events[1].type, equals('pointerup'));
expect(events[1].pointerId, equals(105));
expect(events[1].button, equals(0));
expect(events[1].buttons, equals(0));
expect(events[1].client.x, equals(322));
expect(events[1].client.y, equals(323));
event = expectCorrectType(context.hover(clientX: 400, clientY: 401));
expect(event.type, equals('pointermove'));
expect(event.pointerId, equals(1));
expect(event.button, equals(-1));
expect(event.buttons, equals(0));
expect(event.client.x, equals(400));
expect(event.client.y, equals(401));
events = expectCorrectTypes(context.multiTouchCancel(const <_TouchDetails>[
_TouchDetails(pointer: 106, clientX: 500, clientY: 501),
_TouchDetails(pointer: 107, clientX: 502, clientY: 503),
]));
expect(events.length, equals(2));
expect(events[0].type, equals('pointercancel'));
expect(events[0].pointerId, equals(106));
expect(events[0].button, equals(0));
expect(events[0].buttons, equals(0));
expect(events[0].client.x, equals(0));
expect(events[0].client.y, equals(0));
expect(events[1].type, equals('pointercancel'));
expect(events[1].pointerId, equals(107));
expect(events[1].button, equals(0));
expect(events[1].buttons, equals(0));
expect(events[1].client.x, equals(0));
expect(events[1].client.y, equals(0));
context.pressAllModifiers();
event = expectCorrectType(context.primaryDown(clientX: 100, clientY: 101));
expect(event.getModifierState('Alt'), true);
expect(event.getModifierState('Control'), true);
expect(event.getModifierState('Meta'), true);
expect(event.getModifierState('Shift'), true);
context.unpressAllModifiers();
event = expectCorrectType(context.primaryDown(clientX: 100, clientY: 101));
expect(event.getModifierState('Alt'), false);
expect(event.getModifierState('Control'), false);
expect(event.getModifierState('Meta'), false);
expect(event.getModifierState('Shift'), false);
});
// The reason we listen for pointer events in the bubble phase instead of the
// capture phase is to allow platform views and native text fields to receive
// the event first. This way, they can potentially handle the event and stop
// its propagation to prevent Flutter from receiving and handling it.
test(
'event listeners are attached to the bubble phase',
() {
final _BasicEventContext context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
final DomElement child = createDomHTMLDivElement();
rootElement.append(child);
final DomEventListener stopPropagationListener = createDomEventListener((DomEvent event) {
event.stopPropagation();
});
// The event reaches `PointerBinding` as expected.
child.dispatchEvent(context.primaryDown());
expect(packets, isNotEmpty);
packets.clear();
// The child stops propagation so the event doesn't reach `PointerBinding`.
final DomEvent event = context.primaryDown();
child.addEventListener(event.type, stopPropagationListener);
child.dispatchEvent(event);
expect(packets, isEmpty);
packets.clear();
child.remove();
},
);
test(
'can receive pointer events on the app root',
() {
final _BasicEventContext context = _PointerEventContext();
ui.PointerDataPacket? receivedPacket;
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
receivedPacket = packet;
};
rootElement.dispatchEvent(context.primaryDown());
expect(receivedPacket, isNotNull);
expect(receivedPacket!.data[0].buttons, equals(1));
},
);
test(
'does create an add event if got a pointerdown',
() {
final _BasicEventContext context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
rootElement.dispatchEvent(context.primaryDown());
expect(packets, hasLength(1));
expect(packets.single.data, hasLength(2));
expect(packets.single.data[0].change, equals(ui.PointerChange.add));
expect(packets.single.data[1].change, equals(ui.PointerChange.down));
},
);
test(
'synthesize modifier keys left down event if left or right are not pressed',
() {
final _BasicEventContext context = _PointerEventContext();
// Should synthesize a modifier left key down event when DOM event indicates
// that the modifier key is pressed and known pressing state doesn't contain
// the modifier left key nor the modifier right key.
void shouldSynthesizeLeftDownIfNotPressed(String key) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
final int logicalLeft = kWebLogicalLocationMap[key]![kLocationLeft]!;
expect(keyboardConverter.keyIsPressed(physicalLeft), false);
expect(keyboardConverter.keyIsPressed(physicalRight), false);
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 1);
expectKeyData(keyDataList.last,
type: ui.KeyEventType.down,
deviceType: ui.KeyEventDeviceType.keyboard,
physical: physicalLeft,
logical: logicalLeft,
character: null,
synthesized: true,
);
keyDataList.clear();
keyboardConverter.clearPressedKeys();
}
context.altPressed = true;
shouldSynthesizeLeftDownIfNotPressed('Alt');
context.unpressAllModifiers();
context.ctrlPressed = true;
shouldSynthesizeLeftDownIfNotPressed('Control');
context.unpressAllModifiers();
context.metaPressed = true;
shouldSynthesizeLeftDownIfNotPressed('Meta');
context.unpressAllModifiers();
context.shiftPressed = true;
shouldSynthesizeLeftDownIfNotPressed('Shift');
context.unpressAllModifiers();
},
);
test(
'should not synthesize modifier keys down event if left or right are pressed',
() {
final _BasicEventContext context = _PointerEventContext();
// Should not synthesize a modifier down event when DOM event indicates
// that the modifier key is pressed and known pressing state contains
// the modifier left key.
void shouldNotSynthesizeDownIfLeftPressed(String key, int modifiers) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
keyboardConverter.handleEvent(keyDownEvent('${key}Left', key, modifiers, kLocationLeft));
expect(keyboardConverter.keyIsPressed(physicalLeft), true);
expect(keyboardConverter.keyIsPressed(physicalRight), false);
keyDataList.clear(); // Remove key data generated by handleEvent
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 0);
keyboardConverter.clearPressedKeys();
}
// Should not synthesize a modifier down event when DOM event indicates
// that the modifier key is pressed and known pressing state contains
// the modifier right key.
void shouldNotSynthesizeDownIfRightPressed(String key, int modifiers) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
keyboardConverter.handleEvent(keyDownEvent('${key}Right', key, modifiers, kLocationRight));
expect(keyboardConverter.keyIsPressed(physicalLeft), false);
expect(keyboardConverter.keyIsPressed(physicalRight), true);
keyDataList.clear(); // Remove key data generated by handleEvent
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 0);
keyboardConverter.clearPressedKeys();
}
context.altPressed = true;
shouldNotSynthesizeDownIfLeftPressed('Alt', kAlt);
shouldNotSynthesizeDownIfRightPressed('Alt', kAlt);
context.unpressAllModifiers();
context.ctrlPressed = true;
shouldNotSynthesizeDownIfLeftPressed('Control', kCtrl);
shouldNotSynthesizeDownIfRightPressed('Control', kCtrl);
context.unpressAllModifiers();
context.metaPressed = true;
shouldNotSynthesizeDownIfLeftPressed('Meta', kMeta);
shouldNotSynthesizeDownIfRightPressed('Meta', kMeta);
context.unpressAllModifiers();
context.shiftPressed = true;
shouldNotSynthesizeDownIfLeftPressed('Shift', kShift);
shouldNotSynthesizeDownIfRightPressed('Shift', kShift);
context.unpressAllModifiers();
},
);
test(
'synthesize modifier keys up event if left or right are pressed',
() {
final _BasicEventContext context = _PointerEventContext();
// Should synthesize a modifier left key up event when DOM event indicates
// that the modifier key is not pressed and known pressing state contains
// the modifier left key.
void shouldSynthesizeLeftUpIfLeftPressed(String key, int modifiers) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
final int logicalLeft = kWebLogicalLocationMap[key]![kLocationLeft]!;
keyboardConverter.handleEvent(keyDownEvent('${key}Left', key, modifiers, kLocationLeft));
expect(keyboardConverter.keyIsPressed(physicalLeft), true);
expect(keyboardConverter.keyIsPressed(physicalRight), false);
keyDataList.clear(); // Remove key data generated by handleEvent
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 1);
expectKeyData(keyDataList.last,
type: ui.KeyEventType.up,
deviceType: ui.KeyEventDeviceType.keyboard,
physical: physicalLeft,
logical: logicalLeft,
character: null,
synthesized: true,
);
expect(keyboardConverter.keyIsPressed(physicalLeft), false);
keyboardConverter.clearPressedKeys();
}
// Should synthesize a modifier right key up event when DOM event indicates
// that the modifier key is not pressed and known pressing state contains
// the modifier right key.
void shouldSynthesizeRightUpIfRightPressed(String key, int modifiers) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
final int logicalRight = kWebLogicalLocationMap[key]![kLocationRight]!;
keyboardConverter.handleEvent(keyDownEvent('${key}Right', key, modifiers, kLocationRight));
expect(keyboardConverter.keyIsPressed(physicalLeft), false);
expect(keyboardConverter.keyIsPressed(physicalRight), true);
keyDataList.clear(); // Remove key data generated by handleEvent
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 1);
expectKeyData(keyDataList.last,
type: ui.KeyEventType.up,
deviceType: ui.KeyEventDeviceType.keyboard,
physical: physicalRight,
logical: logicalRight,
character: null,
synthesized: true,
);
expect(keyboardConverter.keyIsPressed(physicalRight), false);
keyboardConverter.clearPressedKeys();
}
context.altPressed = false;
shouldSynthesizeLeftUpIfLeftPressed('Alt', kAlt);
shouldSynthesizeRightUpIfRightPressed('Alt', kAlt);
context.ctrlPressed = false;
shouldSynthesizeLeftUpIfLeftPressed('Control', kCtrl);
shouldSynthesizeRightUpIfRightPressed('Control', kCtrl);
context.metaPressed = false;
shouldSynthesizeLeftUpIfLeftPressed('Meta', kMeta);
shouldSynthesizeRightUpIfRightPressed('Meta', kMeta);
context.shiftPressed = false;
shouldSynthesizeLeftUpIfLeftPressed('Shift', kShift);
shouldSynthesizeRightUpIfRightPressed('Shift', kShift);
},
);
test(
'should not synthesize modifier keys up event if left or right are not pressed',
() {
final _BasicEventContext context = _PointerEventContext();
// Should not synthesize a modifier up event when DOM event indicates
// that the modifier key is not pressed and known pressing state does
// not contain the modifier left key nor the modifier right key.
void shouldNotSynthesizeUpIfNotPressed(String key) {
final int physicalLeft = kWebToPhysicalKey['${key}Left']!;
final int physicalRight = kWebToPhysicalKey['${key}Right']!;
expect(keyboardConverter.keyIsPressed(physicalLeft), false);
expect(keyboardConverter.keyIsPressed(physicalRight), false);
keyDataList.clear(); // Remove key data generated by handleEvent
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 0);
keyboardConverter.clearPressedKeys();
}
context.altPressed = false;
shouldNotSynthesizeUpIfNotPressed('Alt');
context.ctrlPressed = false;
shouldNotSynthesizeUpIfNotPressed('Control');
context.metaPressed = false;
shouldNotSynthesizeUpIfNotPressed('Meta');
context.shiftPressed = false;
shouldNotSynthesizeUpIfNotPressed('Shift');
},
);
test(
'should synthesize modifier keys up event for AltGraph',
() {
final _BasicEventContext context = _PointerEventContext();
final int physicalAltRight = kWebToPhysicalKey['AltRight']!;
final int logicalAltGraph = kWebLogicalLocationMap['AltGraph']![0]!;
// Simulate pressing `AltGr` key.
keyboardConverter.handleEvent(keyDownEvent('AltRight', 'AltGraph'));
expect(keyboardConverter.keyIsPressed(physicalAltRight), true);
keyDataList.clear(); // Remove key data generated by handleEvent.
rootElement.dispatchEvent(context.primaryDown());
expect(keyDataList.length, 1);
expectKeyData(keyDataList.last,
type: ui.KeyEventType.up,
deviceType: ui.KeyEventDeviceType.keyboard,
physical: physicalAltRight,
logical: logicalAltGraph,
character: null,
synthesized: true,
);
expect(keyboardConverter.keyIsPressed(physicalAltRight), false);
keyDataList.clear();
},
);
test(
'correctly detects events on the semantics placeholder',
() {
final _ButtonedEventMixin context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
final DomElement semanticsPlaceholder =
createDomElement('flt-semantics-placeholder');
rootElement.append(semanticsPlaceholder);
// Press on the semantics placeholder.
semanticsPlaceholder.dispatchEvent(context.primaryDown(
clientX: 10.0,
clientY: 10.0,
));
expect(packets, hasLength(1));
expect(packets[0].data, hasLength(2));
expect(packets[0].data[0].change, equals(ui.PointerChange.add));
expect(packets[0].data[1].change, equals(ui.PointerChange.down));
expect(packets[0].data[1].physicalX, equals(10.0 * dpi));
expect(packets[0].data[1].physicalY, equals(10.0 * dpi));
packets.clear();
// Drag on the semantics placeholder.
semanticsPlaceholder.dispatchEvent(context.primaryMove(
clientX: 12.0,
clientY: 10.0,
));
expect(packets, hasLength(1));
expect(packets[0].data, hasLength(1));
expect(packets[0].data[0].change, equals(ui.PointerChange.move));
expect(packets[0].data[0].physicalX, equals(12.0 * dpi));
expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
packets.clear();
// Keep dragging.
semanticsPlaceholder.dispatchEvent(context.primaryMove(
clientX: 15.0,
clientY: 10.0,
));
expect(packets[0].data, hasLength(1));
expect(packets[0].data[0].change, equals(ui.PointerChange.move));
expect(packets[0].data[0].physicalX, equals(15.0 * dpi));
expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
packets.clear();
// Release the pointer on the semantics placeholder.
rootElement.dispatchEvent(context.primaryUp(
clientX: 100.0,
clientY: 200.0,
));
expect(packets, hasLength(1));
expect(packets[0].data, hasLength(2));
expect(packets[0].data[0].change, equals(ui.PointerChange.move));
expect(packets[0].data[0].physicalX, equals(100.0 * dpi));
expect(packets[0].data[0].physicalY, equals(200.0 * dpi));
expect(packets[0].data[1].change, equals(ui.PointerChange.up));
expect(packets[0].data[1].physicalX, equals(100.0 * dpi));
expect(packets[0].data[1].physicalY, equals(200.0 * dpi));
packets.clear();
semanticsPlaceholder.remove();
},
skip: isFirefox, // https://bugzilla.mozilla.org/show_bug.cgi?id=1804190
);
// BUTTONED ADAPTERS
test(
'creates an add event if the first pointer activity is a hover',
() {
final _ButtonedEventMixin context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
rootElement.dispatchEvent(context.hover());
expect(packets, hasLength(1));
expect(packets.single.data, hasLength(2));
expect(packets.single.data[0].change, equals(ui.PointerChange.add));
expect(packets.single.data[0].synthesized, isTrue);
expect(packets.single.data[1].change, equals(ui.PointerChange.hover));
},
);
test(
'sends a pointermove event instead of the second pointerdown in a row',
() {
final _ButtonedEventMixin context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
rootElement.dispatchEvent(context.primaryDown(
clientX: 10.0,
clientY: 10.0,
));
expect(packets, hasLength(1));
// An add will be synthesized.
expect(packets[0].data, hasLength(2));
expect(packets[0].data[0].change, equals(ui.PointerChange.add));
expect(packets[0].data[0].synthesized, isTrue);
expect(packets[0].data[1].change, equals(ui.PointerChange.down));
packets.clear();
rootElement.dispatchEvent(context.primaryDown(
clientX: 20.0,
clientY: 20.0,
));
expect(packets, hasLength(1));
expect(packets[0].data, hasLength(1));
expect(packets[0].data[0].change, equals(ui.PointerChange.move));
expect(packets[0].data[0].buttons, equals(1));
packets.clear();
},
);
test(
'does synthesize add or hover or move for scroll',
() {
final _ButtonedEventMixin context = _PointerEventContext();
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
rootElement.dispatchEvent(context.wheel(
buttons: 0,
clientX: 10,
clientY: 10,
deltaX: 10,
deltaY: 10,
));
rootElement.dispatchEvent(context.wheel(
buttons: 0,
clientX: 20,
clientY: 50,
deltaX: 10,
deltaY: 10,
));
rootElement.dispatchEvent(context.mouseDown(
button: 0,
buttons: 1,
clientX: 20.0,
clientY: 50.0,
));
rootElement.dispatchEvent(context.wheel(
buttons: 1,
clientX: 30,
clientY: 60,
deltaX: 10,
deltaY: 10,
));
expect(packets, hasLength(4));
// An add will be synthesized.
expect(packets[0].data, hasLength(2));
expect(packets[0].data[0].change, equals(ui.PointerChange.add));
expect(packets[0].data[0].pointerIdentifier, equals(0));
expect(packets[0].data[0].synthesized, isTrue);
expect(packets[0].data[0].physicalX, equals(10.0 * dpi));
expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
expect(packets[0].data[0].physicalDeltaX, equals(0.0));
expect(packets[0].data[0].physicalDeltaY, equals(0.0));
expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
expect(
packets[0].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
expect(packets[0].data[1].pointerIdentifier, equals(0));
expect(packets[0].data[1].synthesized, isFalse);
expect(packets[0].data[1].physicalX, equals(10.0 * dpi));
expect(packets[0].data[1].physicalY, equals(10.0 * dpi));
expect(packets[0].data[1].physicalDeltaX, equals(0.0));
expect(packets[0].data[1].physicalDeltaY, equals(0.0));
// A hover will be synthesized.
expect(packets[1].data, hasLength(2));
expect(packets[1].data[0].change, equals(ui.PointerChange.hover));
expect(packets[1].data[0].pointerIdentifier, equals(0));
expect(packets[1].data[0].synthesized, isTrue);
expect(packets[1].data[0].physicalX, equals(20.0 * dpi));
expect(packets[1].data[0].physicalY, equals(50.0 * dpi));
expect(packets[1].data[0].physicalDeltaX, equals(10.0 * dpi));
expect(packets[1].data[0].physicalDeltaY, equals(40.0 * dpi));
expect(packets[1].data[1].change, equals(ui.PointerChange.hover));
expect(
packets[1].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
expect(packets[1].data[1].pointerIdentifier, equals(0));
expect(packets[1].data[1].synthesized, isFalse);
expect(packets[1].data[1].physicalX, equals(20.0 * dpi));
expect(packets[1].data[1].physicalY, equals(50.0 * dpi));
expect(packets[1].data[1].physicalDeltaX, equals(0.0));
expect(packets[1].data[1].physicalDeltaY, equals(0.0));
// No synthetic pointer data for down event.
expect(packets[2].data, hasLength(1));
expect(packets[2].data[0].change, equals(ui.PointerChange.down));
expect(packets[2].data[0].signalKind, equals(ui.PointerSignalKind.none));
expect(packets[2].data[0].pointerIdentifier, equals(1));
expect(packets[2].data[0].synthesized, isFalse);
expect(packets[2].data[0].physicalX, equals(20.0 * dpi));
expect(packets[2].data[0].physicalY, equals(50.0 * dpi));
expect(packets[2].data[0].physicalDeltaX, equals(0.0));
expect(packets[2].data[0].physicalDeltaY, equals(0.0));
// A move will be synthesized instead of hover because the button is currently down.
expect(packets[3].data, hasLength(2));
expect(packets[3].data[0].change, equals(ui.PointerChange.move));
expect(packets[3].data[0].pointerIdentifier, equals(1));
expect(packets[3].data[0].synthesized, isTrue);
expect(packets[3].data[0].physicalX, equals(30.0 * dpi));
expect(packets[3].data[0].physicalY, equals(60.0 * dpi));
expect(packets[3].data[0].physicalDeltaX, equals(10.0 * dpi));
expect(packets[3].data[0].physicalDeltaY, equals(10.0 * dpi));
expect(packets[3].data[1].change, equals(ui.PointerChange.hover));
expect(
packets[3].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
expect(packets[3].data[1].pointerIdentifier, equals(1));
expect(packets[3].data[1].synthesized, isFalse);
expect(packets[3].data[1].physicalX, equals(30.0 * dpi));
expect(packets[3].data[1].physicalY, equals(60.0 * dpi));
expect(packets[3].data[1].physicalDeltaX, equals(0.0));
expect(packets[3].data[1].physicalDeltaY, equals(0.0));
},
);
test(
'converts scroll delta to physical pixels (macOs)',
() {
final _ButtonedEventMixin context = _PointerEventContext();
const double dpi = 2.5;
debugOperatingSystemOverride = OperatingSystem.macOs;
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpi);
final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
packets.add(packet);
};
rootElement.dispatchEvent(context.wheel(
buttons: 0,
clientX: 10,
clientY: 10,
deltaX: 10,
deltaY: 10,
));
expect(packets, hasLength(1));
// An add will be synthesized.
expect(packets[0].data, hasLength(2));
expect(packets[0].data[0].change, equals(ui.PointerChange.add));
// Scroll deltas should be multiplied by `dpi`.
expect(packets[0].data[0].scrollDeltaX, equals(10.0 * dpi));
expect(packets[0].data[0].scrollDeltaY, equals(10.0 * dpi));
expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
expect(packets[0].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
// Scroll deltas should be multiplied by `dpi`.
expect(packets[0].data[0].scrollDeltaX, equals(10.0 * dpi));
expect(packets[0].data[0].scrollDeltaY, equals(10.0 * dpi));
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(1.0);
debugBrowserEngineOverride = null;
},
);
// Exercises the heuristic that classifies wheel events as coming from a
// trackpad or a mouse. Per the expectations below, an event is attributed to
// a trackpad when its delta is not a multiple of 120 while matching
// wheelDelta (delta == -wheelDelta / 3), or when it is similar to / arrives
// soon after a preceding trackpad event. Trackpad events use synthetic
// device id -2; mouse events use -1.
test(
  'does set pointer device kind based on delta precision and wheelDelta',
  () {
    if (isFirefox) {
      // Firefox does not support trackpad events, as they cannot be
      // disambiguated from smoothed mouse wheel events.
      return;
    }
    final _ButtonedEventMixin context = _PointerEventContext();
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // Event 1: delta not a multiple of 120, wheelDelta matches (-3x).
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 119,
      deltaY: 119,
      wheelDeltaX: -357,
      wheelDeltaY: -357,
      timeStamp: 0,
    ));
    // Events 2-3: delta is a multiple of 120 but similar to the previous
    // trackpad event and close in time (10ms apart).
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 120,
      deltaY: 120,
      wheelDeltaX: -360,
      wheelDeltaY: -360,
      timeStamp: 10,
    ));
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 120,
      deltaY: 120,
      wheelDeltaX: -360,
      wheelDeltaY: -360,
      timeStamp: 20,
    ));
    // Event 4: again not a multiple of 120 with matching wheelDelta.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 119,
      deltaY: 119,
      wheelDeltaX: -357,
      wheelDeltaY: -357,
      timeStamp: 1000,
    ));
    // Event 5: multiple of 120 and dissimilar (sign flipped), but only 10ms
    // after the previous event.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: -120,
      deltaY: -120,
      wheelDeltaX: 360,
      wheelDeltaY: 360,
      timeStamp: 1010,
    ));
    // Event 6: multiple of 120, dissimilar, and long (990ms) after the
    // previous event.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 0,
      deltaY: -120,
      wheelDeltaX: 0,
      wheelDeltaY: 360,
      timeStamp: 2000,
    ));
    // Event 7: not a multiple of 120 and wheelDelta does NOT match.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 0,
      deltaY: 40,
      wheelDeltaX: 0,
      wheelDeltaY: -360,
      timeStamp: 3000,
    ));
    expect(packets, hasLength(7));
    // An add will be synthesized.
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].pointerIdentifier, equals(0));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    // Because the delta is not in increments of 120 and has matching wheelDelta,
    // it will be a trackpad event.
    expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[0].data[1].kind, equals(ui.PointerDeviceKind.trackpad));
    expect(packets[0].data[1].device, equals(-2));
    expect(packets[0].data[1].pointerIdentifier, equals(0));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalDeltaX, equals(0.0));
    expect(packets[0].data[1].physicalDeltaY, equals(0.0));
    expect(packets[0].data[1].scrollDeltaX, equals(119.0));
    expect(packets[0].data[1].scrollDeltaY, equals(119.0));
    // Because the delta is in increments of 120, but is similar to the
    // previous event, it will be a trackpad event.
    expect(packets[1].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[1].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[1].data[0].kind, equals(ui.PointerDeviceKind.trackpad));
    expect(packets[1].data[0].device, equals(-2));
    expect(packets[1].data[0].pointerIdentifier, equals(0));
    expect(packets[1].data[0].synthesized, isFalse);
    expect(packets[1].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[1].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[1].data[0].physicalDeltaX, equals(0.0));
    expect(packets[1].data[0].physicalDeltaY, equals(0.0));
    expect(packets[1].data[0].scrollDeltaX, equals(120.0));
    expect(packets[1].data[0].scrollDeltaY, equals(120.0));
    // Because the delta is in increments of 120, but is again similar to the
    // previous event, it will be a trackpad event.
    expect(packets[2].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[2].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[2].data[0].kind, equals(ui.PointerDeviceKind.trackpad));
    expect(packets[2].data[0].device, equals(-2));
    expect(packets[2].data[0].pointerIdentifier, equals(0));
    expect(packets[2].data[0].synthesized, isFalse);
    expect(packets[2].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[2].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[2].data[0].physicalDeltaX, equals(0.0));
    expect(packets[2].data[0].physicalDeltaY, equals(0.0));
    expect(packets[2].data[0].scrollDeltaX, equals(120.0));
    expect(packets[2].data[0].scrollDeltaY, equals(120.0));
    // Because the delta is not in increments of 120 and has matching wheelDelta,
    // it will be a trackpad event.
    expect(packets[3].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[3].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[3].data[0].kind, equals(ui.PointerDeviceKind.trackpad));
    expect(packets[3].data[0].device, equals(-2));
    expect(packets[3].data[0].pointerIdentifier, equals(0));
    expect(packets[3].data[0].synthesized, isFalse);
    expect(packets[3].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[3].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[3].data[0].physicalDeltaX, equals(0.0));
    expect(packets[3].data[0].physicalDeltaY, equals(0.0));
    expect(packets[3].data[0].scrollDeltaX, equals(119.0));
    expect(packets[3].data[0].scrollDeltaY, equals(119.0));
    // Because the delta is in increments of 120, and is not similar to the
    // previous event, but occurred soon after the previous event, it will be
    // a trackpad event.
    expect(packets[4].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[4].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[4].data[0].kind, equals(ui.PointerDeviceKind.trackpad));
    expect(packets[4].data[0].device, equals(-2));
    expect(packets[4].data[0].pointerIdentifier, equals(0));
    expect(packets[4].data[0].synthesized, isFalse);
    expect(packets[4].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[4].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[4].data[0].physicalDeltaX, equals(0.0));
    expect(packets[4].data[0].physicalDeltaY, equals(0.0));
    expect(packets[4].data[0].scrollDeltaX, equals(-120.0));
    expect(packets[4].data[0].scrollDeltaY, equals(-120.0));
    // An add will be synthesized. Note the mouse device id (-1), distinct
    // from the trackpad device id (-2) used above.
    expect(packets[5].data, hasLength(2));
    expect(packets[5].data[0].change, equals(ui.PointerChange.add));
    expect(packets[5].data[0].signalKind, equals(ui.PointerSignalKind.none));
    expect(packets[5].data[0].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[5].data[0].device, equals(-1));
    expect(packets[5].data[0].pointerIdentifier, equals(0));
    expect(packets[5].data[0].synthesized, isTrue);
    expect(packets[5].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[5].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[5].data[0].physicalDeltaX, equals(0.0));
    expect(packets[5].data[0].physicalDeltaY, equals(0.0));
    expect(packets[5].data[0].scrollDeltaX, equals(0.0));
    expect(packets[5].data[0].scrollDeltaY, equals(-120.0));
    // Because the delta is in increments of 120, and is not similar to
    // the previous event, and occurred long after the previous event, it will
    // be a mouse event.
    expect(packets[5].data[1].change, equals(ui.PointerChange.hover));
    expect(packets[5].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[5].data[1].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[5].data[1].device, equals(-1));
    expect(packets[5].data[1].pointerIdentifier, equals(0));
    expect(packets[5].data[1].synthesized, isFalse);
    expect(packets[5].data[1].physicalX, equals(10.0 * dpi));
    expect(packets[5].data[1].physicalY, equals(10.0 * dpi));
    expect(packets[5].data[1].physicalDeltaX, equals(0.0));
    expect(packets[5].data[1].physicalDeltaY, equals(0.0));
    expect(packets[5].data[1].scrollDeltaX, equals(0.0));
    expect(packets[5].data[1].scrollDeltaY, equals(-120.0));
    // Because the delta is not in increments of 120 and has non-matching
    // wheelDelta, it will be a mouse event.
    expect(packets[6].data, hasLength(1));
    expect(packets[6].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[6].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[6].data[0].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[6].data[0].device, equals(-1));
    expect(packets[6].data[0].pointerIdentifier, equals(0));
    expect(packets[6].data[0].synthesized, isFalse);
    expect(packets[6].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[6].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[6].data[0].physicalDeltaX, equals(0.0));
    expect(packets[6].data[0].physicalDeltaY, equals(0.0));
    expect(packets[6].data[0].scrollDeltaX, equals(0.0));
    expect(packets[6].data[0].scrollDeltaY, equals(40.0));
  },
);
// Verifies that a wheel event with ctrlKey set is treated as a pinch-zoom
// (scale) gesture on macOS — unless the physical Control key is actually
// held down, in which case it is an ordinary Control+scroll.
test(
  'does choose scroll vs scale based on ctrlKey',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // The ctrlKey heuristic is macOS-specific; force that OS for the test.
    debugOperatingSystemOverride = OperatingSystem.macOs;
    // Event 1: no ctrlKey -> plain scroll.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 0,
      deltaY: 120,
    ));
    // Event 2: ctrlKey set while no physical Control key is down -> scale.
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 0,
      deltaY: 100,
      ctrlKey: true,
    ));
    // Event 3: ctrlKey set while the physical Control key IS down -> scroll.
    keyboardConverter.handleEvent(keyDownEvent('ControlLeft', 'Control', kCtrl));
    rootElement.dispatchEvent(context.wheel(
      buttons: 0,
      clientX: 10,
      clientY: 10,
      deltaX: 0,
      deltaY: 240,
      ctrlKey: true,
    ));
    keyboardConverter.handleEvent(keyUpEvent('ControlLeft', 'Control', kCtrl));
    expect(packets, hasLength(3));
    // An add will be synthesized.
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].pointerIdentifier, equals(0));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    // Because ctrlKey is not pressed, it will be a scroll.
    expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[1].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[0].data[1].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[0].data[1].pointerIdentifier, equals(0));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalDeltaX, equals(0.0));
    expect(packets[0].data[1].physicalDeltaY, equals(0.0));
    expect(packets[0].data[1].scrollDeltaX, equals(0.0));
    expect(packets[0].data[1].scrollDeltaY, equals(120.0));
    // Because ctrlKey is pressed, it will be a scale.
    expect(packets[1].data, hasLength(1));
    expect(packets[1].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[1].data[0].signalKind, equals(ui.PointerSignalKind.scale));
    expect(packets[1].data[0].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[1].data[0].pointerIdentifier, equals(0));
    expect(packets[1].data[0].synthesized, isFalse);
    expect(packets[1].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[1].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[1].data[0].physicalDeltaX, equals(0.0));
    expect(packets[1].data[0].physicalDeltaY, equals(0.0));
    expect(packets[1].data[0].scale, closeTo(0.60653065971, 1e-10)); // math.exp(-100/200)
    // [macOS only]: Because ctrlKey is true, but the key is pressed physically, it will be a scroll.
    expect(packets[2].data, hasLength(1));
    expect(packets[2].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[2].data[0].signalKind, equals(ui.PointerSignalKind.scroll));
    expect(packets[2].data[0].kind, equals(ui.PointerDeviceKind.mouse));
    expect(packets[2].data[0].pointerIdentifier, equals(0));
    expect(packets[2].data[0].synthesized, isFalse);
    expect(packets[2].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[2].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[2].data[0].physicalDeltaX, equals(0.0));
    expect(packets[2].data[0].physicalDeltaY, equals(0.0));
    expect(packets[2].data[0].scrollDeltaX, equals(0.0));
    expect(packets[2].data[0].scrollDeltaY, equals(240.0));
    // Restore real OS detection for subsequent tests.
    debugOperatingSystemOverride = null;
  },
);
// Verifies that physical deltas are computed relative to the previous
// position of the same device, and that the pointer identifier is
// incremented on each down event (but not on hover/move/up).
test(
  'does calculate delta and pointer identifier correctly',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // First hover: a synthesized add (identifier 0, zero delta) precedes it.
    rootElement.dispatchEvent(context.hover(
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].pointerIdentifier, equals(0));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[1].pointerIdentifier, equals(0));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].physicalX, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[1].physicalDeltaX, equals(0.0));
    expect(packets[0].data[1].physicalDeltaY, equals(0.0));
    packets.clear();
    // Second hover: delta reflects movement from (10, 10) to (20, 20).
    rootElement.dispatchEvent(context.hover(
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[0].pointerIdentifier, equals(0));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaY, equals(10.0 * dpi));
    packets.clear();
    // A down starts a new pointer sequence: identifier increments to 1.
    rootElement.dispatchEvent(context.primaryDown(
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.down));
    expect(packets[0].data[0].pointerIdentifier, equals(1));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    packets.clear();
    // Dragging keeps identifier 1; delta tracks (20, 20) -> (40, 30).
    rootElement.dispatchEvent(context.primaryMove(
      clientX: 40.0,
      clientY: 30.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].pointerIdentifier, equals(1));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(40.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(30.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalDeltaY, equals(10.0 * dpi));
    packets.clear();
    // Release at the same position: identifier unchanged, zero delta.
    rootElement.dispatchEvent(context.primaryUp(
      clientX: 40.0,
      clientY: 30.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].pointerIdentifier, equals(1));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(40.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(30.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    packets.clear();
    // Hovering after the up still uses identifier 1 (no new down yet).
    rootElement.dispatchEvent(context.hover(
      clientX: 20.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[0].pointerIdentifier, equals(1));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(-20.0 * dpi));
    expect(packets[0].data[0].physicalDeltaY, equals(-20.0 * dpi));
    packets.clear();
    // The next down bumps the identifier to 2.
    rootElement.dispatchEvent(context.primaryDown(
      clientX: 20.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.down));
    expect(packets[0].data[0].pointerIdentifier, equals(2));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(10.0 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0.0));
    expect(packets[0].data[0].physicalDeltaY, equals(0.0));
    packets.clear();
  },
);
// Verifies the `buttons` bitfield reported for down, move, leave, and up
// events across drags with the primary (bit 1), secondary (bit 2), and
// middle (bit 4) buttons, and that a leave event arriving while a button is
// still pressed produces no pointer data.
test(
  'correctly converts buttons of down, move, leave, and up events',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // Add and hover
    rootElement.dispatchEvent(context.hover(
      clientX: 10,
      clientY: 11,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].physicalX, equals(10 * dpi));
    expect(packets[0].data[0].physicalY, equals(11 * dpi));
    expect(packets[0].data[1].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].physicalX, equals(10 * dpi));
    expect(packets[0].data[1].physicalY, equals(11 * dpi));
    expect(packets[0].data[1].buttons, equals(0));
    packets.clear();
    // Drag with primary button.
    rootElement.dispatchEvent(context.mouseDown(
      button: 0,
      buttons: 1,
      clientX: 10.0,
      clientY: 11.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.down));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(10 * dpi));
    expect(packets[0].data[0].physicalY, equals(11 * dpi));
    expect(packets[0].data[0].buttons, equals(1));
    packets.clear();
    rootElement.dispatchEvent(context.mouseMove(
      button: _kNoButtonChange,
      buttons: 1,
      clientX: 20.0,
      clientY: 21.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20 * dpi));
    expect(packets[0].data[0].physicalY, equals(21 * dpi));
    expect(packets[0].data[0].buttons, equals(1));
    packets.clear();
    rootElement.dispatchEvent(context.mouseUp(
      button: 0,
      clientX: 20.0,
      clientY: 21.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20 * dpi));
    expect(packets[0].data[0].physicalY, equals(21 * dpi));
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
    // Drag with secondary button
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 20.0,
      clientY: 21.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.down));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(20 * dpi));
    expect(packets[0].data[0].physicalY, equals(21 * dpi));
    expect(packets[0].data[0].buttons, equals(2));
    packets.clear();
    rootElement.dispatchEvent(context.mouseMove(
      button: _kNoButtonChange,
      buttons: 2,
      clientX: 30.0,
      clientY: 31.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(30 * dpi));
    expect(packets[0].data[0].physicalY, equals(31 * dpi));
    expect(packets[0].data[0].buttons, equals(2));
    packets.clear();
    rootElement.dispatchEvent(context.mouseUp(
      button: 2,
      clientX: 30.0,
      clientY: 31.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(30 * dpi));
    expect(packets[0].data[0].physicalY, equals(31 * dpi));
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
    // Drag with middle button
    rootElement.dispatchEvent(context.mouseDown(
      button: 1,
      buttons: 4,
      clientX: 30.0,
      clientY: 31.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.down));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(30 * dpi));
    expect(packets[0].data[0].physicalY, equals(31 * dpi));
    expect(packets[0].data[0].buttons, equals(4));
    packets.clear();
    rootElement.dispatchEvent(context.mouseMove(
      button: _kNoButtonChange,
      buttons: 4,
      clientX: 40.0,
      clientY: 41.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(40 * dpi));
    expect(packets[0].data[0].physicalY, equals(41 * dpi));
    expect(packets[0].data[0].buttons, equals(4));
    packets.clear();
    rootElement.dispatchEvent(context.mouseUp(
      button: 1,
      clientX: 40.0,
      clientY: 41.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(40 * dpi));
    expect(packets[0].data[0].physicalY, equals(41 * dpi));
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
    // Leave
    // A leave event with a button still pressed is ignored entirely.
    rootElement.dispatchEvent(context.mouseLeave(
      buttons: 1,
      clientX: 1000.0,
      clientY: 2000.0,
    ));
    expect(packets, isEmpty);
    packets.clear();
    // A leave with no buttons pressed is reported as a hover.
    rootElement.dispatchEvent(context.mouseLeave(
      buttons: 0,
      clientX: 1000.0,
      clientY: 2000.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].physicalX, equals(1000 * dpi));
    expect(packets[0].data[0].physicalY, equals(2000 * dpi));
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
  },
);
// Verifies the `buttons` bitfield as additional buttons are pressed and
// released mid-drag: pressing/releasing a second button while another is
// held is reported as a move, not a down/up.
test('correctly handles button changes during a down sequence', () {
  final _ButtonedEventMixin mouse = _PointerEventContext();
  final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
  ui.PlatformDispatcher.instance.onPointerDataPacket = received.add;

  // Asserts that exactly one packet with a single non-synthesized entry
  // arrived, carrying the given change and buttons bitfield, then resets
  // the received list for the next step.
  void expectSingleChange(ui.PointerChange change, int buttons) {
    expect(received, hasLength(1));
    expect(received[0].data, hasLength(1));
    expect(received[0].data[0].change, equals(change));
    expect(received[0].data[0].synthesized, isFalse);
    expect(received[0].data[0].buttons, equals(buttons));
    received.clear();
  }

  // Press LMB: the first packet also carries a synthesized add.
  rootElement.dispatchEvent(mouse.mouseDown(button: 0, buttons: 1));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.add));
  expect(received[0].data[0].synthesized, isTrue);
  expect(received[0].data[1].change, equals(ui.PointerChange.down));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].buttons, equals(1));
  received.clear();

  // Press MMB while LMB is held: reported as a move with buttons 1|4.
  rootElement.dispatchEvent(mouse.mouseMove(button: 1, buttons: 5));
  expectSingleChange(ui.PointerChange.move, 5);

  // Release LMB while MMB is still held: a move with buttons dropping to 4.
  rootElement.dispatchEvent(mouse.mouseMove(button: 0, buttons: 4));
  expectSingleChange(ui.PointerChange.move, 4);

  // Release MMB: the sequence ends with an up and no buttons pressed.
  rootElement.dispatchEvent(mouse.mouseUp(button: 1));
  expectSingleChange(ui.PointerChange.up, 0);
});
test('synthesizes a pointerup event when pointermove comes before the up', () {
  // This can happen when the user pops up the context menu by right
  // clicking, then dismisses it with a left click.
  final _ButtonedEventMixin mouse = _PointerEventContext();
  final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
  ui.PlatformDispatcher.instance.onPointerDataPacket = received.add;

  // Asserts that exactly one packet with a single non-synthesized entry
  // arrived at the given logical position, with the given change and
  // buttons bitfield, then resets the received list.
  void expectSingleChange(
    ui.PointerChange change, {
    required int buttons,
    required double x,
    required double y,
  }) {
    expect(received, hasLength(1));
    expect(received[0].data, hasLength(1));
    expect(received[0].data[0].change, equals(change));
    expect(received[0].data[0].synthesized, isFalse);
    expect(received[0].data[0].physicalX, equals(x * dpi));
    expect(received[0].data[0].physicalY, equals(y * dpi));
    expect(received[0].data[0].buttons, equals(buttons));
    received.clear();
  }

  // RMB down at (10, 11): the first packet also carries a synthesized add.
  rootElement.dispatchEvent(mouse.mouseDown(
    button: 2,
    buttons: 2,
    clientX: 10,
    clientY: 11,
  ));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.add));
  expect(received[0].data[0].synthesized, isTrue);
  expect(received[0].data[0].physicalX, equals(10 * dpi));
  expect(received[0].data[0].physicalY, equals(11 * dpi));
  expect(received[0].data[1].change, equals(ui.PointerChange.down));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].physicalX, equals(10 * dpi));
  expect(received[0].data[1].physicalY, equals(11 * dpi));
  expect(received[0].data[1].buttons, equals(2));
  received.clear();

  // Two moves arrive while RMB remains pressed.
  rootElement.dispatchEvent(mouse.mouseMove(
    button: _kNoButtonChange,
    buttons: 2,
    clientX: 20.0,
    clientY: 21.0,
  ));
  expectSingleChange(ui.PointerChange.move, buttons: 2, x: 20, y: 21);
  rootElement.dispatchEvent(mouse.mouseMove(
    button: _kNoButtonChange,
    buttons: 2,
    clientX: 20.0,
    clientY: 21.0,
  ));
  expectSingleChange(ui.PointerChange.move, buttons: 2, x: 20, y: 21);

  // The RMB release finally arrives.
  rootElement.dispatchEvent(mouse.mouseUp(
    button: 2,
    clientX: 20.0,
    clientY: 21.0,
  ));
  expectSingleChange(ui.PointerChange.up, buttons: 0, x: 20, y: 21);
});
test('correctly handles uncontinuous button changes during a down sequence', () {
  // Gesture under test:
  //
  // - RMB is pressed and held, popping up the context menu;
  // - LMB is clicked (down then up);
  // - RMB is released.
  final _ButtonedEventMixin mouse = _PointerEventContext();
  final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
  ui.PlatformDispatcher.instance.onPointerDataPacket = received.add;

  // Asserts that exactly one packet with a single non-synthesized entry
  // arrived, carrying the given change and buttons bitfield, then resets
  // the received list.
  void expectSingleChange(ui.PointerChange change, int buttons) {
    expect(received, hasLength(1));
    expect(received[0].data, hasLength(1));
    expect(received[0].data[0].change, equals(change));
    expect(received[0].data[0].synthesized, isFalse);
    expect(received[0].data[0].buttons, equals(buttons));
    received.clear();
  }

  // RMB down and hold, popping up the context menu. The first packet also
  // carries a synthesized add.
  rootElement.dispatchEvent(mouse.mouseDown(button: 2, buttons: 2));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.add));
  expect(received[0].data[0].synthesized, isTrue);
  expect(received[0].data[1].change, equals(ui.PointerChange.down));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].buttons, equals(2));
  received.clear();

  // LMB down. The browser reports "button: -1" despite the change in
  // "buttons", probably because the press gesture was absorbed by
  // dismissing the context menu. It surfaces as a move with buttons 2|1.
  rootElement.dispatchEvent(mouse.mouseMove(button: _kNoButtonChange, buttons: 3));
  expectSingleChange(ui.PointerChange.move, 3);

  // LMB up: a move with buttons back to just RMB.
  rootElement.dispatchEvent(mouse.mouseMove(button: 0, buttons: 2));
  expectSingleChange(ui.PointerChange.move, 2);

  // RMB up: the sequence ends with an up and no buttons pressed.
  rootElement.dispatchEvent(mouse.mouseUp(button: 2));
  expectSingleChange(ui.PointerChange.up, 0);
});
test('correctly handles missing right mouse button up when followed by move', () {
  // Gesture under test:
  //
  // - RMB click pops up the context menu;
  // - LMB click closes the menu (the browser swallows the RMB up event);
  // - the mouse then moves.
  final _ButtonedEventMixin mouse = _PointerEventContext();
  final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
  ui.PlatformDispatcher.instance.onPointerDataPacket = received.add;

  // RMB down pops up the context menu; the browser will not deliver the
  // matching up event in this scenario. The first packet also carries a
  // synthesized add.
  rootElement.dispatchEvent(mouse.mouseDown(button: 2, buttons: 2));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.add));
  expect(received[0].data[0].synthesized, isTrue);
  expect(received[0].data[1].change, equals(ui.PointerChange.down));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].buttons, equals(2));
  received.clear();

  // The user now hovers. Since "buttons" reports 0, the missed release is
  // delivered as an up followed by the hover itself.
  rootElement.dispatchEvent(mouse.mouseMove(button: _kNoButtonChange, buttons: 0));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.up));
  expect(received[0].data[0].synthesized, isFalse);
  expect(received[0].data[0].buttons, equals(0));
  expect(received[0].data[1].change, equals(ui.PointerChange.hover));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].buttons, equals(0));
  received.clear();
});
test('handles RMB click when the browser sends it as a move', () {
  // When the user clicks RMB and moves the mouse quickly (before the
  // context menu shows up), the browser sends a move event before the down.
  // That move event carries "button: -1, buttons: 2".
  final _ButtonedEventMixin mouse = _PointerEventContext();
  final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
  ui.PlatformDispatcher.instance.onPointerDataPacket = received.add;

  // The out-of-order move: a synthesized add followed by a hover with no
  // buttons reported.
  rootElement.dispatchEvent(mouse.mouseMove(
    button: -1,
    buttons: 2,
    clientX: 10.0,
    clientY: 10.0,
  ));
  expect(received, hasLength(1));
  expect(received[0].data, hasLength(2));
  expect(received[0].data[0].change, equals(ui.PointerChange.add));
  expect(received[0].data[0].synthesized, isTrue);
  expect(received[0].data[1].change, equals(ui.PointerChange.hover));
  expect(received[0].data[1].synthesized, isFalse);
  expect(received[0].data[1].buttons, equals(0));
  received.clear();
});
test(
  'correctly handles hover after RMB click',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    // This can happen with the following gesture sequence:
    //
    // - Pops up the context menu by right clicking, but holds RMB;
    // - Move the pointer to hover.
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // Press RMB and hold, popping up the context menu. The first packet
    // also carries a synthesized add.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(2));
    packets.clear();
    // Move the mouse. The event will have "buttons: 0" because RMB was
    // released but the browser didn't send a pointerup/mouseup event.
    // The hover is also triggered at a different position.
    rootElement.dispatchEvent(context.hover(
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(3));
    // First a move to the new position is synthesized while RMB still
    // appears pressed...
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].buttons, equals(2));
    // ...then the missed release is reported...
    expect(packets[0].data[1].change, equals(ui.PointerChange.up));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(0));
    // ...followed by the hover itself.
    expect(packets[0].data[2].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[2].synthesized, isFalse);
    expect(packets[0].data[2].buttons, equals(0));
    packets.clear();
  },
);
test(
  'correctly handles LMB click after RMB click',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    // This can happen with the following gesture sequence:
    //
    // - Pops up the context menu by right clicking, but holds RMB;
    // - Clicks LMB in a different location;
    // - Release LMB.
    //
    // The LMB click occurs in a different location because when RMB is
    // clicked, and the contextmenu is shown, the browser stops sending
    // `pointermove`/`mousemove` events. Then when the LMB click comes in, it
    // could be in a different location without any `*move` events in between.
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };
    // Press RMB and hold, popping up the context menu. The first packet
    // also carries a synthesized add.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(2));
    packets.clear();
    // Press LMB while RMB is held. Since a button is already pressed, the
    // additional press is reported as a move with buttons 2|1.
    rootElement.dispatchEvent(context.mouseDown(
      button: 0,
      buttons: 3,
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].buttons, equals(3));
    packets.clear();
    // Release LMB.
    rootElement.dispatchEvent(context.primaryUp(
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
  },
);
test(
  'correctly handles two consecutive RMB clicks with no up in between',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    // This can happen with the following gesture sequence:
    //
    // - Pops up the context menu by right clicking, but holds RMB;
    // - Clicks RMB again in a different location;
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };

    // Press RMB and hold, popping up the context menu.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    // A new device: an `add` is synthesized before the `down`.
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(2));
    packets.clear();

    // Press RMB again. In Chrome, when RMB is clicked again while the
    // context menu is still active, it sends a pointerdown/mousedown event
    // with "buttons:0". We convert this to pointer up, pointer down.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 0,
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(3));
    // A `move` is synthesized first to bring the pointer to the new location.
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].buttons, equals(2));
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(20.0 * dpi));
    expect(packets[0].data[1].change, equals(ui.PointerChange.up));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(0));
    expect(packets[0].data[2].change, equals(ui.PointerChange.down));
    expect(packets[0].data[2].synthesized, isFalse);
    expect(packets[0].data[2].buttons, equals(2));
    packets.clear();

    // Release RMB.
    rootElement.dispatchEvent(context.mouseUp(
      button: 2,
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
  },
);
test(
  'correctly handles two consecutive RMB clicks with up in between',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    // This can happen with the following gesture sequence:
    //
    // - Pops up the context menu by right clicking, but doesn't hold RMB;
    // - Clicks RMB again in a different location;
    //
    // This seems to be happening sometimes when using RMB on the Mac trackpad.
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };

    // Press RMB, popping up the context menu.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    // A new device: an `add` is synthesized before the `down`.
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(2));
    packets.clear();

    // RMB up.
    rootElement.dispatchEvent(context.mouseUp(
      button: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();

    // Press RMB again. In Chrome, when RMB is clicked again while the
    // context menu is still active, it sends a pointerdown/mousedown event
    // with "buttons:0".
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 0,
      clientX: 20.0,
      clientY: 20.0,
    ));
    // The pointer is up this time, so the position change is synthesized as
    // a `hover` before the `down` (not a `move`).
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.hover));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[0].buttons, equals(0));
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(2));
    packets.clear();

    // Release RMB.
    rootElement.dispatchEvent(context.mouseUp(
      button: 2,
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].synthesized, isFalse);
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
  },
);
test(
  'correctly handles two consecutive RMB clicks in two different locations',
  () {
    // Scenario:
    //
    // - The context menu is popped up by right clicking;
    // - The browser sends an RMB up event;
    // - RMB is clicked again in a different location.
    //
    // This scenario happens occasionally. It's not entirely clear why, but
    // in some cases the browser actually sends an `up` event for the RMB
    // click even when the context menu is shown.
    final _ButtonedEventMixin ctx = _PointerEventContext();
    final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      received.add(packet);
    };

    // Press RMB and hold, popping up the context menu.
    rootElement.dispatchEvent(ctx.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(received, hasLength(1));
    List<ui.PointerData> events = received.single.data;
    expect(events, hasLength(2));
    expect(events[0].change, equals(ui.PointerChange.add));
    expect(events[0].synthesized, isTrue);
    expect(events[1].change, equals(ui.PointerChange.down));
    expect(events[1].synthesized, isFalse);
    expect(events[1].buttons, equals(2));
    received.clear();

    // Release RMB.
    rootElement.dispatchEvent(ctx.mouseUp(
      button: 2,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(received, hasLength(1));
    events = received.single.data;
    expect(events, hasLength(1));
    expect(events[0].change, equals(ui.PointerChange.up));
    expect(events[0].buttons, equals(0));
    received.clear();

    // Press RMB again, in a different location. The position change is
    // synthesized as a hover before the down.
    rootElement.dispatchEvent(ctx.mouseDown(
      button: 2,
      buttons: 2,
      clientX: 20.0,
      clientY: 20.0,
    ));
    expect(received, hasLength(1));
    events = received.single.data;
    expect(events, hasLength(2));
    expect(events[0].change, equals(ui.PointerChange.hover));
    expect(events[0].synthesized, isTrue);
    expect(events[0].buttons, equals(0));
    expect(events[1].change, equals(ui.PointerChange.down));
    expect(events[1].synthesized, isFalse);
    expect(events[1].buttons, equals(2));
    received.clear();
  },
);
test(
  'handles overlapping left/right down and up events',
  () {
    final _ButtonedEventMixin context = _PointerEventContext();
    // This can happen with the following gesture sequence:
    //
    //     LMB:     down-------------------up
    //     RMB:          down------------------up
    //     Flutter: down-------move-------move-------up
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };

    // Press and hold LMB.
    rootElement.dispatchEvent(context.mouseDown(
      button: 0,
      buttons: 1,
      clientX: 5.0,
      clientY: 100.0,
    ));
    // A new device: an `add` is synthesized before the `down`.
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.add));
    expect(packets[0].data[0].synthesized, isTrue);
    expect(packets[0].data[1].change, equals(ui.PointerChange.down));
    expect(packets[0].data[1].synthesized, isFalse);
    expect(packets[0].data[1].buttons, equals(1));
    expect(packets[0].data[1].physicalX, equals(5.0 * dpi));
    expect(packets[0].data[1].physicalY, equals(100.0 * dpi));
    packets.clear();

    // Press and hold RMB. The pointer is already down, so we only send a move
    // to update the position of the pointer.
    rootElement.dispatchEvent(context.mouseDown(
      button: 2,
      buttons: 3,
      clientX: 20.0,
      clientY: 100.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].buttons, equals(3));
    expect(packets[0].data[0].physicalX, equals(20.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(100.0 * dpi));
    packets.clear();

    // Release LMB. The pointer is still down (RMB), so we only send a move to
    // update the position of the pointer.
    rootElement.dispatchEvent(context.mouseUp(
      button: 0,
      buttons: 2,
      clientX: 30.0,
      clientY: 100.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.move));
    expect(packets[0].data[0].buttons, equals(2));
    expect(packets[0].data[0].physicalX, equals(30.0 * dpi));
    expect(packets[0].data[0].physicalY, equals(100.0 * dpi));
    packets.clear();

    // Release RMB. There's no more buttons down, so we send an up event.
    rootElement.dispatchEvent(context.mouseUp(
      button: 2,
      buttons: 0,
      clientX: 30.0,
      clientY: 100.0,
    ));
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(1));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].buttons, equals(0));
    packets.clear();
  },
);
test(
  'correctly detects up event outside of flutterViewElement',
  () {
    // This can happen when the up event occurs while the mouse is outside
    // the browser window.
    final _ButtonedEventMixin ctx = _PointerEventContext();
    final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      received.add(packet);
    };

    // Press and drag around.
    rootElement.dispatchEvent(ctx.primaryDown(clientX: 10.0, clientY: 10.0));
    for (final double x in <double>[12.0, 15.0, 20.0]) {
      rootElement.dispatchEvent(ctx.primaryMove(clientX: x, clientY: 10.0));
    }
    received.clear();

    // Move outside the flutterViewElement.
    rootElement.dispatchEvent(ctx.primaryMove(
      clientX: 900.0,
      clientY: 1900.0,
    ));
    expect(received, hasLength(1));
    List<ui.PointerData> events = received.single.data;
    expect(events, hasLength(1));
    expect(events[0].change, equals(ui.PointerChange.move));
    expect(events[0].physicalX, equals(900.0 * dpi));
    expect(events[0].physicalY, equals(1900.0 * dpi));
    received.clear();

    // Release outside the flutterViewElement. The up is preceded by a move
    // to the release position, as asserted below.
    rootElement.dispatchEvent(ctx.primaryUp(
      clientX: 1000.0,
      clientY: 2000.0,
    ));
    expect(received, hasLength(1));
    events = received.single.data;
    expect(events, hasLength(2));
    expect(events[0].change, equals(ui.PointerChange.move));
    expect(events[0].physicalX, equals(1000.0 * dpi));
    expect(events[0].physicalY, equals(2000.0 * dpi));
    expect(events[1].change, equals(ui.PointerChange.up));
    expect(events[1].physicalX, equals(1000.0 * dpi));
    expect(events[1].physicalY, equals(2000.0 * dpi));
    received.clear();
  },
);
// MULTIPOINTER ADAPTERS

test(
  'treats each pointer separately',
  () {
    final _MultiPointerEventMixin context = _PointerEventContext();
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    List<ui.PointerData> data;
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };

    // Two pointers down. Each touch produces its own packet: a synthesized
    // `add` followed by a `down` per device.
    context.multiTouchDown(const <_TouchDetails>[
      _TouchDetails(pointer: 2, clientX: 100, clientY: 101),
      _TouchDetails(pointer: 3, clientX: 200, clientY: 201),
    ]).forEach(rootElement.dispatchEvent);
    expect(packets.length, 2);
    expect(packets[0].data.length, 2);
    expect(packets[1].data.length, 2);

    data = _allPointerData(packets);
    expect(data, hasLength(4));
    expect(data[0].change, equals(ui.PointerChange.add));
    expect(data[0].synthesized, isTrue);
    expect(data[0].device, equals(2));
    expect(data[0].physicalX, equals(100 * dpi));
    expect(data[0].physicalY, equals(101 * dpi));

    expect(data[1].change, equals(ui.PointerChange.down));
    expect(data[1].device, equals(2));
    expect(data[1].buttons, equals(1));
    expect(data[1].physicalX, equals(100 * dpi));
    expect(data[1].physicalY, equals(101 * dpi));
    expect(data[1].physicalDeltaX, equals(0));
    expect(data[1].physicalDeltaY, equals(0));

    expect(data[2].change, equals(ui.PointerChange.add));
    expect(data[2].synthesized, isTrue);
    expect(data[2].device, equals(3));
    expect(data[2].physicalX, equals(200 * dpi));
    expect(data[2].physicalY, equals(201 * dpi));

    expect(data[3].change, equals(ui.PointerChange.down));
    expect(data[3].device, equals(3));
    expect(data[3].buttons, equals(1));
    expect(data[3].physicalX, equals(200 * dpi));
    expect(data[3].physicalY, equals(201 * dpi));
    expect(data[3].physicalDeltaX, equals(0));
    expect(data[3].physicalDeltaY, equals(0));
    packets.clear();

    // Two pointers move. Deltas are relative to each device's own previous
    // position (device 3: 200,201 -> 300,302; device 2: 100,101 -> 400,402).
    context.multiTouchMove(const <_TouchDetails>[
      _TouchDetails(pointer: 3, clientX: 300, clientY: 302),
      _TouchDetails(pointer: 2, clientX: 400, clientY: 402),
    ]).forEach(rootElement.dispatchEvent);
    expect(packets.length, 2);
    expect(packets[0].data.length, 1);
    expect(packets[1].data.length, 1);

    data = _allPointerData(packets);
    expect(data, hasLength(2));
    expect(data[0].change, equals(ui.PointerChange.move));
    expect(data[0].device, equals(3));
    expect(data[0].buttons, equals(1));
    expect(data[0].physicalX, equals(300 * dpi));
    expect(data[0].physicalY, equals(302 * dpi));
    expect(data[0].physicalDeltaX, equals(100 * dpi));
    expect(data[0].physicalDeltaY, equals(101 * dpi));

    expect(data[1].change, equals(ui.PointerChange.move));
    expect(data[1].device, equals(2));
    expect(data[1].buttons, equals(1));
    expect(data[1].physicalX, equals(400 * dpi));
    expect(data[1].physicalY, equals(402 * dpi));
    expect(data[1].physicalDeltaX, equals(300 * dpi));
    expect(data[1].physicalDeltaY, equals(301 * dpi));
    packets.clear();

    // One pointer up. The `up` is followed by a `remove` for the device.
    context.multiTouchUp(const <_TouchDetails>[
      _TouchDetails(pointer: 3, clientX: 300, clientY: 302),
    ]).forEach(rootElement.dispatchEvent);
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].device, equals(3));
    expect(packets[0].data[0].buttons, equals(0));
    expect(packets[0].data[0].physicalX, equals(300 * dpi));
    expect(packets[0].data[0].physicalY, equals(302 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0));
    expect(packets[0].data[0].physicalDeltaY, equals(0));

    expect(packets[0].data[1].change, equals(ui.PointerChange.remove));
    expect(packets[0].data[1].device, equals(3));
    expect(packets[0].data[1].buttons, equals(0));
    expect(packets[0].data[1].physicalX, equals(300 * dpi));
    expect(packets[0].data[1].physicalY, equals(302 * dpi));
    expect(packets[0].data[1].physicalDeltaX, equals(0));
    expect(packets[0].data[1].physicalDeltaY, equals(0));
    packets.clear();

    // Another pointer up.
    context.multiTouchUp(const <_TouchDetails>[
      _TouchDetails(pointer: 2, clientX: 400, clientY: 402),
    ]).forEach(rootElement.dispatchEvent);
    expect(packets, hasLength(1));
    expect(packets[0].data, hasLength(2));
    expect(packets[0].data[0].change, equals(ui.PointerChange.up));
    expect(packets[0].data[0].device, equals(2));
    expect(packets[0].data[0].buttons, equals(0));
    expect(packets[0].data[0].physicalX, equals(400 * dpi));
    expect(packets[0].data[0].physicalY, equals(402 * dpi));
    expect(packets[0].data[0].physicalDeltaX, equals(0));
    expect(packets[0].data[0].physicalDeltaY, equals(0));

    expect(packets[0].data[1].change, equals(ui.PointerChange.remove));
    expect(packets[0].data[1].device, equals(2));
    expect(packets[0].data[1].buttons, equals(0));
    expect(packets[0].data[1].physicalX, equals(400 * dpi));
    expect(packets[0].data[1].physicalY, equals(402 * dpi));
    expect(packets[0].data[1].physicalDeltaX, equals(0));
    expect(packets[0].data[1].physicalDeltaY, equals(0));
    packets.clear();

    // Again two pointers down (reuse pointer ID). Removed devices are
    // re-added with fresh state (zero deltas).
    context.multiTouchDown(const <_TouchDetails>[
      _TouchDetails(pointer: 3, clientX: 500, clientY: 501),
      _TouchDetails(pointer: 2, clientX: 600, clientY: 601),
    ]).forEach(rootElement.dispatchEvent);
    expect(packets.length, 2);
    expect(packets[0].data.length, 2);
    expect(packets[1].data.length, 2);

    data = _allPointerData(packets);
    expect(data, hasLength(4));
    expect(data[0].change, equals(ui.PointerChange.add));
    expect(data[0].synthesized, isTrue);
    expect(data[0].device, equals(3));
    expect(data[0].physicalX, equals(500 * dpi));
    expect(data[0].physicalY, equals(501 * dpi));

    expect(data[1].change, equals(ui.PointerChange.down));
    expect(data[1].device, equals(3));
    expect(data[1].buttons, equals(1));
    expect(data[1].physicalX, equals(500 * dpi));
    expect(data[1].physicalY, equals(501 * dpi));
    expect(data[1].physicalDeltaX, equals(0));
    expect(data[1].physicalDeltaY, equals(0));

    expect(data[2].change, equals(ui.PointerChange.add));
    expect(data[2].synthesized, isTrue);
    expect(data[2].device, equals(2));
    expect(data[2].physicalX, equals(600 * dpi));
    expect(data[2].physicalY, equals(601 * dpi));

    expect(data[3].change, equals(ui.PointerChange.down));
    expect(data[3].device, equals(2));
    expect(data[3].buttons, equals(1));
    expect(data[3].physicalX, equals(600 * dpi));
    expect(data[3].physicalY, equals(601 * dpi));
    expect(data[3].physicalDeltaX, equals(0));
    expect(data[3].physicalDeltaY, equals(0));
    packets.clear();
  },
);
test(
  'correctly parses cancel event',
  () {
    final _MultiPointerEventMixin ctx = _PointerEventContext();
    final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      received.add(packet);
    };

    // Two pointers down. The down events themselves are covered by other
    // tests, so their packets are discarded here.
    ctx.multiTouchDown(const <_TouchDetails>[
      _TouchDetails(pointer: 2, clientX: 100, clientY: 101),
      _TouchDetails(pointer: 3, clientX: 200, clientY: 201),
    ]).forEach(rootElement.dispatchEvent);
    received.clear();

    // Cancel one pointer. The cancel is reported at the pointer's last known
    // position (200, 201) — not at the coordinates carried by the cancel
    // event — and is followed by a `remove` for the device.
    ctx.multiTouchCancel(const <_TouchDetails>[
      _TouchDetails(pointer: 3, clientX: 300, clientY: 302),
    ]).forEach(rootElement.dispatchEvent);
    expect(received.length, 1);
    final List<ui.PointerData> events = received[0].data;
    expect(events.length, 2);
    expect(events[0].change, equals(ui.PointerChange.cancel));
    expect(events[0].device, equals(3));
    expect(events[0].buttons, equals(0));
    expect(events[0].physicalX, equals(200 * dpi));
    expect(events[0].physicalY, equals(201 * dpi));
    expect(events[0].physicalDeltaX, equals(0));
    expect(events[0].physicalDeltaY, equals(0));
    expect(events[1].change, equals(ui.PointerChange.remove));
    expect(events[1].device, equals(3));
    expect(events[1].buttons, equals(0));
    expect(events[1].physicalX, equals(200 * dpi));
    expect(events[1].physicalY, equals(201 * dpi));
    expect(events[1].physicalDeltaX, equals(0));
    expect(events[1].physicalDeltaY, equals(0));
    received.clear();
  },
);
test(
  'does not synthesize pointer up if from different device',
  () {
    final _PointerEventContext ctx = _PointerEventContext();
    final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      received.add(packet);
    };

    // First device goes down. An `add` is synthesized for the new device.
    ctx.multiTouchDown(const <_TouchDetails>[
      _TouchDetails(pointer: 1, clientX: 100, clientY: 101),
    ]).forEach(rootElement.dispatchEvent);
    expect(received, hasLength(1));
    List<ui.PointerData> events = received.single.data;
    expect(events, hasLength(2));
    expect(events[0].change, equals(ui.PointerChange.add));
    expect(events[0].synthesized, isTrue);
    expect(events[0].device, equals(1));
    expect(events[1].change, equals(ui.PointerChange.down));
    expect(events[1].device, equals(1));
    received.clear();

    // Second device goes down while the first is still down. Only an `add`
    // and a `down` for device 2 are emitted — no `up` is synthesized for
    // device 1.
    ctx.multiTouchDown(const <_TouchDetails>[
      _TouchDetails(pointer: 2, clientX: 200, clientY: 202),
    ]).forEach(rootElement.dispatchEvent);
    expect(received, hasLength(1));
    events = received.single.data;
    expect(events, hasLength(2));
    expect(events[0].change, equals(ui.PointerChange.add));
    expect(events[0].synthesized, isTrue);
    expect(events[0].device, equals(2));
    expect(events[1].change, equals(ui.PointerChange.down));
    expect(events[1].device, equals(2));
    received.clear();
  },
);
test(
  'ignores pointer up or pointer cancel events for unknown device',
  () {
    final _PointerEventContext ctx = _PointerEventContext();
    final List<ui.PointerDataPacket> received = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      received.add(packet);
    };

    // An `up` for a device that never went down produces no packets.
    ctx.multiTouchUp(const <_TouchDetails>[
      _TouchDetails(pointer: 23, clientX: 200, clientY: 202),
    ]).forEach(rootElement.dispatchEvent);
    expect(received, isEmpty);

    // Likewise for a `cancel` of an unknown device.
    ctx.multiTouchCancel(const <_TouchDetails>[
      _TouchDetails(pointer: 24, clientX: 200, clientY: 202),
    ]).forEach(rootElement.dispatchEvent);
    expect(received, isEmpty);
  },
);
test(
  'handles random pointer id on up events',
  () {
    final _PointerEventContext context = _PointerEventContext();
    // This happens with pens that are simulated with mouse events
    // (e.g. Wacom). It sends events with the pointer type "mouse", and
    // assigns a random pointer ID to each event.
    //
    // For more info, see: https://github.com/flutter/flutter/issues/75559
    final List<ui.PointerDataPacket> packets = <ui.PointerDataPacket>[];
    ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
      packets.add(packet);
    };

    rootElement.dispatchEvent(context.mouseDown(
      pointerId: 12,
      button: 0,
      buttons: 1,
      clientX: 10.0,
      clientY: 10.0,
    ));
    expect(packets, hasLength(1));
    expect(packets.single.data, hasLength(2));
    expect(packets.single.data[0].change, equals(ui.PointerChange.add));
    expect(packets.single.data[0].synthesized, isTrue);
    expect(packets.single.data[1].change, equals(ui.PointerChange.down));
    packets.clear();

    // The up event carries a pointer ID (41) that was never seen in a down
    // event. This must not crash the binding, and the up must still be
    // delivered.
    expect(
      () {
        rootElement.dispatchEvent(context.mouseUp(
          pointerId: 41,
          button: 0,
          buttons: 0,
          clientX: 10.0,
          clientY: 10.0,
        ));
      },
      returnsNormally,
    );
    expect(packets, hasLength(1));
    expect(packets.single.data, hasLength(1));
    expect(packets.single.data[0].change, equals(ui.PointerChange.up));
  },
);
test('throws if browser does not support pointer events', () {
  // A browser without PointerEvent support cannot drive the binding at all.
  PointerBinding createBinding() =>
      PointerBinding(view, detector: MockPointerSupportDetector(false));
  expect(createBinding, throwsUnsupportedError);
});
// Runs the shared ClickDebouncer test suite against the current binding.
group('ClickDebouncer', () {
  _testClickDebouncer(getBinding: () => instance);
});
}
/// A semantics action captured during a test, recorded as the action [type]
/// together with the id of the semantics node it targeted.
typedef CapturedSemanticsEvent = ({
  ui.SemanticsAction type,
  int nodeId,
});
void _testClickDebouncer({required PointerBinding Function() getBinding}) {
final DateTime testTime = DateTime(2018, 12, 17);
late List<ui.PointerChange> pointerPackets;
late List<CapturedSemanticsEvent> semanticsActions;
late _PointerEventContext context;
late PointerBinding binding;
// Like [test], but enables semantics (with a fixed timestamp function) for
// the duration of [body] and disables it again afterwards.
void testWithSemantics(
  String description,
  Future<void> Function() body, {
  Object? skip,
}) {
  test(
    description,
    () async {
      EngineSemantics.instance
        ..debugOverrideTimestampFunction(() => testTime)
        ..semanticsEnabled = true;
      await body();
      // Restore the default so other tests are unaffected.
      EngineSemantics.instance.semanticsEnabled = false;
    },
    skip: skip,
  );
}
setUp(() {
  context = _PointerEventContext();
  pointerPackets = <ui.PointerChange>[];
  semanticsActions = <CapturedSemanticsEvent>[];
  // Record only the change kind of each pointer datum; positions are not
  // asserted by this suite.
  ui.PlatformDispatcher.instance.onPointerDataPacket = (ui.PointerDataPacket packet) {
    for (final ui.PointerData data in packet.data) {
      pointerPackets.add(data.change);
    }
  };
  // Capture semantics actions as (type, nodeId) records.
  EnginePlatformDispatcher.instance.onSemanticsActionEvent = (ui.SemanticsActionEvent event) {
    semanticsActions.add((type: event.type, nodeId: event.nodeId));
  };
  binding = getBinding();
});
test('Forwards to framework when semantics is off', () {
  // With semantics disabled the debouncer never engages, so pointer events
  // go straight to the framework.
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, false);

  final debouncer = PointerBinding.clickDebouncer;
  expect(debouncer.isDebouncing, false);

  binding.rootElement.dispatchEvent(context.primaryDown());

  expect(pointerPackets, <ui.PointerChange>[
    ui.PointerChange.add,
    ui.PointerChange.down,
  ]);
  expect(debouncer.isDebouncing, false);
  expect(semanticsActions, isEmpty);
});
testWithSemantics('Forwards to framework when not debouncing', () async {
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  // This test DOM element is missing the `flt-tappable` attribute on purpose
  // so that the debouncer does not debounce events and simply lets
  // everything through.
  final DomElement testElement = createDomElement('flt-semantics');
  view.dom.semanticsHost.appendChild(testElement);

  testElement.dispatchEvent(context.primaryDown());
  testElement.dispatchEvent(context.primaryUp());

  // Events pass straight through; no semantics taps are generated.
  expect(PointerBinding.clickDebouncer.isDebouncing, false);
  expect(pointerPackets, <ui.PointerChange>[
    ui.PointerChange.add,
    ui.PointerChange.down,
    ui.PointerChange.up,
  ]);
  expect(semanticsActions, isEmpty);
});
testWithSemantics('Accumulates pointer events starting from pointerdown', () async {
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  // A tappable element (`flt-tappable`) triggers the debouncer.
  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);

  testElement.dispatchEvent(context.primaryDown());
  expect(
    reason: 'Should start debouncing at first pointerdown',
    PointerBinding.clickDebouncer.isDebouncing,
    true,
  );

  testElement.dispatchEvent(context.primaryUp());
  expect(
    reason: 'Should still be debouncing after pointerup',
    PointerBinding.clickDebouncer.isDebouncing,
    true,
  );
  expect(
    reason: 'Events are withheld from the framework while debouncing',
    pointerPackets,
    <ui.PointerChange>[],
  );
  expect(
    PointerBinding.clickDebouncer.debugState!.target,
    testElement,
  );
  expect(
    PointerBinding.clickDebouncer.debugState!.timer.isActive,
    isTrue,
  );
  expect(
    PointerBinding.clickDebouncer.debugState!.queue.map<String>((QueuedEvent e) => e.event.type),
    <String>['pointerdown', 'pointerup'],
  );

  // Let the debounce timer expire.
  await Future<void>.delayed(const Duration(milliseconds: 250));
  expect(
    reason: 'Should stop debouncing after timer expires.',
    PointerBinding.clickDebouncer.isDebouncing,
    false,
  );
  expect(
    reason: 'Queued up events should be flushed to the framework.',
    pointerPackets,
    <ui.PointerChange>[
      ui.PointerChange.add,
      ui.PointerChange.down,
      ui.PointerChange.up,
    ],
  );
  expect(semanticsActions, isEmpty);
});
testWithSemantics('Flushes events to framework when target changes', () async {
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);

  testElement.dispatchEvent(context.primaryDown());
  expect(
    reason: 'Should start debouncing at first pointerdown',
    PointerBinding.clickDebouncer.isDebouncing,
    true,
  );

  // Dispatch the pointerup on a different tappable element.
  final DomElement newTarget = createDomElement('flt-semantics');
  newTarget.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(newTarget);
  newTarget.dispatchEvent(context.primaryUp());

  expect(
    reason: 'Should stop debouncing when target changes.',
    PointerBinding.clickDebouncer.isDebouncing,
    false,
  );
  expect(
    reason: 'The state should be cleaned up after stopping debouncing.',
    PointerBinding.clickDebouncer.debugState,
    isNull,
  );
  expect(
    reason: 'Queued up events should be flushed to the framework.',
    pointerPackets,
    <ui.PointerChange>[
      ui.PointerChange.add,
      ui.PointerChange.down,
      ui.PointerChange.up,
    ],
  );
  expect(semanticsActions, isEmpty);
});
testWithSemantics('Forwards click to framework when not debouncing but listening', () async {
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  // A tappable element that has not received any pointer events yet.
  final DomElement tappable = createDomElement('flt-semantics');
  tappable.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(tappable);

  final rect = tappable.getBoundingClientRect();
  final DomEvent click = createDomMouseEvent('click', <Object?, Object?>{
    'clientX': rect.x,
    'clientY': rect.y,
  });

  // Without an active debounce session the click is forwarded to semantics
  // immediately; no pointer data reaches the framework.
  PointerBinding.clickDebouncer.onClick(click, 42, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);
  expect(pointerPackets, isEmpty);
  expect(semanticsActions, <CapturedSemanticsEvent>[
    (type: ui.SemanticsAction.tap, nodeId: 42),
  ]);
});
testWithSemantics('Forwards click to framework when debouncing and listening', () async {
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);

  // Start a debounce session with a pointerdown.
  testElement.dispatchEvent(context.primaryDown());
  expect(PointerBinding.clickDebouncer.isDebouncing, true);

  final DomEvent click = createDomMouseEvent(
    'click',
    <Object?, Object?>{
      'clientX': testElement.getBoundingClientRect().x,
      'clientY': testElement.getBoundingClientRect().y,
    }
  );

  // The click becomes a semantics tap; the withheld pointer events are not
  // delivered to the framework.
  PointerBinding.clickDebouncer.onClick(click, 42, true);
  expect(pointerPackets, isEmpty);
  expect(semanticsActions, <CapturedSemanticsEvent>[
    (type: ui.SemanticsAction.tap, nodeId: 42)
  ]);
});
testWithSemantics('Dedupes click if debouncing but not listening', () async {
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);

  // Start a debounce session with a pointerdown.
  testElement.dispatchEvent(context.primaryDown());
  expect(PointerBinding.clickDebouncer.isDebouncing, true);

  final DomEvent click = createDomMouseEvent(
    'click',
    <Object?, Object?>{
      'clientX': testElement.getBoundingClientRect().x,
      'clientY': testElement.getBoundingClientRect().y,
    }
  );

  // The node declares it is NOT listening to click (third argument false).
  PointerBinding.clickDebouncer.onClick(click, 42, false);
  expect(
    reason: 'When tappable declares that it is not listening to click events '
        'the debouncer flushes the pointer events to the framework and '
        'lets it sort it out.',
    pointerPackets,
    <ui.PointerChange>[
      ui.PointerChange.add,
      ui.PointerChange.down,
    ],
  );
  expect(semanticsActions, isEmpty);
});
testWithSemantics('Dedupes click if pointer down/up flushed recently', () async {
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);

  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);

  testElement.dispatchEvent(context.primaryDown());

  // Simulate the user holding the pointer down for some time before releasing,
  // such that the pointerup event happens close to timer expiration. This
  // will create the situation that the click event arrives just after the
  // pointerup is flushed. Forwarding the click to the framework would look
  // like a double-click, so the click event is deduped.
  await Future<void>.delayed(const Duration(milliseconds: 190));
  testElement.dispatchEvent(context.primaryUp());

  expect(PointerBinding.clickDebouncer.isDebouncing, true);
  expect(
    reason: 'Timer has not expired yet',
    pointerPackets, isEmpty,
  );

  // Wait for the timer to expire to make sure pointer events are flushed.
  await Future<void>.delayed(const Duration(milliseconds: 20));
  expect(
    reason: 'Queued up events should be flushed to the framework because the '
        'time expired before the click event arrived.',
    pointerPackets,
    <ui.PointerChange>[
      ui.PointerChange.add,
      ui.PointerChange.down,
      ui.PointerChange.up,
    ],
  );

  final DomEvent click = createDomMouseEvent(
    'click',
    <Object?, Object?>{
      'clientX': testElement.getBoundingClientRect().x,
      'clientY': testElement.getBoundingClientRect().y,
    }
  );
  PointerBinding.clickDebouncer.onClick(click, 42, true);
  expect(
    reason: 'Because the DOM click event was deduped.',
    semanticsActions,
    isEmpty,
  );
// TODO(yjbanov): https://github.com/flutter/flutter/issues/142991.
}, skip: operatingSystem == OperatingSystem.windows);
// Complement of the previous scenario: when enough time passes between the
// flushed pointerup and the click, the click is NOT a duplicate and must be
// forwarded to the framework as a semantics tap.
testWithSemantics('Forwards click if enough time passed after the last flushed pointerup', () async {
  expect(EnginePlatformDispatcher.instance.semanticsEnabled, true);
  expect(PointerBinding.clickDebouncer.isDebouncing, false);
  final DomElement testElement = createDomElement('flt-semantics');
  testElement.setAttribute('flt-tappable', '');
  view.dom.semanticsHost.appendChild(testElement);
  testElement.dispatchEvent(context.primaryDown());
  // Simulate the user holding the pointer down for some time before releasing,
  // such that the pointerup event happens close to timer expiration. This
  // makes it possible for the click to arrive early. However, this test in
  // particular will delay the click to check that the delay is checked
  // correctly. The inverse situation was already tested in the previous test.
  await Future<void>.delayed(const Duration(milliseconds: 190));
  testElement.dispatchEvent(context.primaryUp());
  expect(PointerBinding.clickDebouncer.isDebouncing, true);
  expect(
    reason: 'Timer has not expired yet',
    pointerPackets, isEmpty,
  );
  // Wait for the timer to expire to make sure pointer events are flushed.
  // The longer (100ms) delay here is what distinguishes this test from the
  // previous one.
  await Future<void>.delayed(const Duration(milliseconds: 100));
  expect(
    reason: 'Queued up events should be flushed to the framework because the '
            'time expired before the click event arrived.',
    pointerPackets,
    <ui.PointerChange>[
      ui.PointerChange.add,
      ui.PointerChange.down,
      ui.PointerChange.up,
    ],
  );
  final DomEvent click = createDomMouseEvent(
    'click',
    <Object?, Object?>{
      'clientX': testElement.getBoundingClientRect().x,
      'clientY': testElement.getBoundingClientRect().y,
    }
  );
  PointerBinding.clickDebouncer.onClick(click, 42, true);
  expect(
    reason: 'The DOM click should still be sent to the framework because it '
            'happened far enough from the last pointerup that it is unlikely '
            'to be a duplicate.',
    semanticsActions,
    <CapturedSemanticsEvent>[
      (type: ui.SemanticsAction.tap, nodeId: 42)
    ],
  );
});
}
/// Test double for [SafariPointerEventWorkaround] that records whether the
/// workaround was requested instead of touching the real environment.
class MockSafariPointerEventWorkaround implements SafariPointerEventWorkaround {
  /// Whether [workAroundMissingPointerEvents] has been called at least once.
  bool workAroundInvoked = false;

  @override
  void workAroundMissingPointerEvents() => workAroundInvoked = true;

  @override
  void dispose() {
    // Nothing to clean up in the mock.
  }
}
/// Common interface of the event-simulation contexts used by these tests.
///
/// Concrete contexts (mouse, touch, pointer) know how to synthesize the DOM
/// events corresponding to a primary interaction (left click or single touch).
abstract class _BasicEventContext {
  /// Human-readable name of the context, used to label test cases.
  String get name;

  // Accepted modifier keys are 'Alt', 'Control', 'Meta' and 'Shift'.
  // https://www.w3.org/TR/uievents-key/#keys-modifier defines more modifiers,
  // but only the four main modifiers could be set from MouseEvent, PointerEvent
  // and TouchEvent constructors.
  bool altPressed = false;
  bool ctrlPressed = false;
  bool metaPressed = false;
  bool shiftPressed = false;

  // Generate an event that is:
  //
  // * For mouse, a left click
  // * For touch, a touch down
  //
  // Note: the coordinates must be nullable. Dart null safety rejects optional
  // named parameters that are non-nullable and have no default, and all
  // overrides of these methods already declare `double?`.
  DomEvent primaryDown({double? clientX, double? clientY});

  // Generate an event that is:
  //
  // * For mouse, a drag with LMB down
  // * For touch, a touch drag
  DomEvent primaryMove({double? clientX, double? clientY});

  // Generate an event that is:
  //
  // * For mouse, release LMB
  // * For touch, a touch up
  DomEvent primaryUp({double? clientX, double? clientY});

  /// Sets all four supported modifier keys to the pressed state.
  void pressAllModifiers() {
    altPressed = true;
    ctrlPressed = true;
    metaPressed = true;
    shiftPressed = true;
  }

  /// Releases all four supported modifier keys.
  void unpressAllModifiers() {
    altPressed = false;
    ctrlPressed = false;
    metaPressed = false;
    shiftPressed = false;
  }
}
/// Adds button-aware mouse event generation on top of [_BasicEventContext].
///
/// Implementations produce events with explicit `button` / `buttons` values;
/// this mixin derives the primary-button interactions from them.
mixin _ButtonedEventMixin on _BasicEventContext {
  // Generate an event that is a mouse down with the specific buttons.
  DomEvent mouseDown(
      {double? clientX, double? clientY, int? button, int? buttons});
  // Generate an event that is a mouse drag with the specific buttons, or button
  // changes during the drag.
  //
  // If there is no button change, assign `button` with _kNoButtonChange.
  DomEvent mouseMove(
      {double? clientX,
      double? clientY,
      required int button,
      required int buttons});
  // Generate an event that moves the mouse outside of the tracked area.
  DomEvent mouseLeave({double? clientX, double? clientY, required int buttons});
  // Generate an event that releases all mouse buttons.
  DomEvent mouseUp({double? clientX, double? clientY, int? button, int? buttons});
  /// A hover: a move with no buttons pressed and no button change.
  DomEvent hover({double? clientX, double? clientY}) {
    return mouseMove(
      buttons: 0,
      button: _kNoButtonChange,
      clientX: clientX,
      clientY: clientY,
    );
  }
  @override
  DomEvent primaryDown({double? clientX, double? clientY}) {
    // Left mouse button: `button` index 0, `buttons` bitmask 0x1.
    return mouseDown(
      buttons: 1,
      button: 0,
      clientX: clientX,
      clientY: clientY,
    );
  }
  @override
  DomEvent primaryMove({double? clientX, double? clientY}) {
    return mouseMove(
      buttons: 1,
      button: _kNoButtonChange,
      clientX: clientX,
      clientY: clientY,
    );
  }
  @override
  DomEvent primaryUp({double? clientX, double? clientY}) {
    return mouseUp(
      button: 0,
      clientX: clientX,
      clientY: clientY,
    );
  }
  /// Creates a 'wheel' event with the given deltas, optionally overriding the
  /// (normally read-only) `timeStamp` property.
  DomEvent wheel({
    required int? buttons,
    required double? clientX,
    required double? clientY,
    required double? deltaX,
    required double? deltaY,
    double? wheelDeltaX,
    double? wheelDeltaY,
    int? timeStamp,
    bool ctrlKey = false,
  }) {
    final DomEvent event = createDomWheelEvent('wheel', <String, Object>{
      if (buttons != null) 'buttons': buttons,
      if (clientX != null) 'clientX': clientX,
      if (clientY != null) 'clientY': clientY,
      if (deltaX != null) 'deltaX': deltaX,
      if (deltaY != null) 'deltaY': deltaY,
      if (wheelDeltaX != null) 'wheelDeltaX': wheelDeltaX,
      if (wheelDeltaY != null) 'wheelDeltaY': wheelDeltaY,
      'ctrlKey': ctrlKey,
    });
    // timeStamp can't be set in the constructor, need to override the getter.
    // `Object.defineProperty` replaces the property on the event instance.
    if (timeStamp != null) {
      js_util.callMethod<void>(
        objectConstructor,
        'defineProperty',
        <dynamic>[
          event,
          'timeStamp',
          js_util.jsify(<String, dynamic>{
            'value': timeStamp,
            'configurable': true
          })
        ]
      );
    }
    return event;
  }
}
/// Per-touch-point parameters used by [_MultiPointerEventMixin] to describe
/// one contact in a multi-touch gesture.
class _TouchDetails {
  const _TouchDetails({this.pointer, this.clientX, this.clientY});

  // Pointer (touch contact) identifier.
  final int? pointer;
  // Touch position in client coordinates.
  final double? clientX;
  final double? clientY;
}
/// Adds multi-touch event generation on top of [_BasicEventContext].
///
/// The primary* overrides are implemented as single-contact gestures using
/// pointer id 1.
mixin _MultiPointerEventMixin on _BasicEventContext {
  List<DomEvent> multiTouchDown(List<_TouchDetails> touches);
  List<DomEvent> multiTouchMove(List<_TouchDetails> touches);
  List<DomEvent> multiTouchUp(List<_TouchDetails> touches);
  List<DomEvent> multiTouchCancel(List<_TouchDetails> touches);

  /// Describes a single touch contact (pointer id 1) at the given position.
  List<_TouchDetails> _singleTouch(double? clientX, double? clientY) {
    return <_TouchDetails>[
      _TouchDetails(pointer: 1, clientX: clientX, clientY: clientY),
    ];
  }

  @override
  DomEvent primaryDown({double? clientX, double? clientY}) {
    return multiTouchDown(_singleTouch(clientX, clientY)).first;
  }

  @override
  DomEvent primaryMove({double? clientX, double? clientY}) {
    return multiTouchMove(_singleTouch(clientX, clientY)).first;
  }

  @override
  DomEvent primaryUp({double? clientX, double? clientY}) {
    return multiTouchUp(_singleTouch(clientX, clientY)).first;
  }
}
// A test context for `_PointerAdapter`, including its name, and how to generate
// events.
//
// For the difference between MouseEvent and PointerEvent, see _MouseAdapter.
class _PointerEventContext extends _BasicEventContext
    with _ButtonedEventMixin
    implements _MultiPointerEventMixin {
  @override
  String get name => 'PointerAdapter';

  /// Builds a `PointerEvent` of the given [type] with the property set shared
  /// by every event this context produces.
  ///
  /// When [includeModifiers] is true the current modifier-key state
  /// ([altPressed], [ctrlPressed], [metaPressed], [shiftPressed]) is attached
  /// as well; only pointerdown events carried modifiers in the original code.
  ///
  /// This consolidates what used to be four near-identical private builders.
  DomEvent _createEvent(
    String type, {
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointer,
    String? pointerType,
    bool includeModifiers = false,
  }) {
    return createDomPointerEvent(type, <String, dynamic>{
      'bubbles': true,
      'pointerId': pointer,
      'button': button,
      'buttons': buttons,
      'clientX': clientX,
      'clientY': clientY,
      'pointerType': pointerType,
      if (includeModifiers) ...<String, dynamic>{
        'altKey': altPressed,
        'ctrlKey': ctrlPressed,
        'metaKey': metaPressed,
        'shiftKey': shiftPressed,
      },
    });
  }

  @override
  List<DomEvent> multiTouchDown(List<_TouchDetails> touches) {
    return touches
        .map((_TouchDetails details) => _downWithFullDetails(
              pointer: details.pointer,
              buttons: 1,
              button: 0,
              clientX: details.clientX,
              clientY: details.clientY,
              pointerType: 'touch',
            ))
        .toList();
  }

  @override
  DomEvent mouseDown({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointerId = 1,
  }) {
    return _downWithFullDetails(
      pointer: pointerId,
      buttons: buttons,
      button: button,
      clientX: clientX,
      clientY: clientY,
      pointerType: 'mouse',
    );
  }

  /// Creates a 'pointerdown' event; the only event kind that carries the
  /// modifier-key state.
  DomEvent _downWithFullDetails({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointer,
    String? pointerType,
  }) {
    return _createEvent(
      'pointerdown',
      clientX: clientX,
      clientY: clientY,
      button: button,
      buttons: buttons,
      pointer: pointer,
      pointerType: pointerType,
      includeModifiers: true,
    );
  }

  @override
  List<DomEvent> multiTouchMove(List<_TouchDetails> touches) {
    return touches
        .map((_TouchDetails details) => _moveWithFullDetails(
              pointer: details.pointer,
              buttons: 1,
              button: _kNoButtonChange,
              clientX: details.clientX,
              clientY: details.clientY,
              pointerType: 'touch',
            ))
        .toList();
  }

  @override
  DomEvent mouseMove({
    double? clientX,
    double? clientY,
    required int button,
    required int buttons,
    int pointerId = 1,
  }) {
    return _moveWithFullDetails(
      pointer: pointerId,
      buttons: buttons,
      button: button,
      clientX: clientX,
      clientY: clientY,
      pointerType: 'mouse',
    );
  }

  /// Creates a 'pointermove' event.
  DomEvent _moveWithFullDetails({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointer,
    String? pointerType,
  }) {
    return _createEvent(
      'pointermove',
      clientX: clientX,
      clientY: clientY,
      button: button,
      buttons: buttons,
      pointer: pointer,
      pointerType: pointerType,
    );
  }

  @override
  DomEvent mouseLeave({
    double? clientX,
    double? clientY,
    required int buttons,
    int pointerId = 1,
  }) {
    return _leaveWithFullDetails(
      pointer: pointerId,
      buttons: buttons,
      button: 0,
      clientX: clientX,
      clientY: clientY,
      pointerType: 'mouse',
    );
  }

  /// Creates a 'pointerleave' event.
  DomEvent _leaveWithFullDetails({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointer,
    String? pointerType,
  }) {
    return _createEvent(
      'pointerleave',
      clientX: clientX,
      clientY: clientY,
      button: button,
      buttons: buttons,
      pointer: pointer,
      pointerType: pointerType,
    );
  }

  @override
  List<DomEvent> multiTouchUp(List<_TouchDetails> touches) {
    return touches
        .map((_TouchDetails details) => _upWithFullDetails(
              pointer: details.pointer,
              button: 0,
              clientX: details.clientX,
              clientY: details.clientY,
              pointerType: 'touch',
            ))
        .toList();
  }

  @override
  DomEvent mouseUp({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointerId = 1,
  }) {
    return _upWithFullDetails(
      pointer: pointerId,
      button: button,
      buttons: buttons,
      clientX: clientX,
      clientY: clientY,
      pointerType: 'mouse',
    );
  }

  /// Creates a 'pointerup' event.
  DomEvent _upWithFullDetails({
    double? clientX,
    double? clientY,
    int? button,
    int? buttons,
    int? pointer,
    String? pointerType,
  }) {
    return _createEvent(
      'pointerup',
      clientX: clientX,
      clientY: clientY,
      button: button,
      buttons: buttons,
      pointer: pointer,
      pointerType: pointerType,
    );
  }

  @override
  List<DomEvent> multiTouchCancel(List<_TouchDetails> touches) {
    // Cancel events are always reported at the origin with no buttons.
    return touches
        .map((_TouchDetails details) => _createEvent(
              'pointercancel',
              clientX: 0,
              clientY: 0,
              button: 0,
              buttons: 0,
              pointer: details.pointer,
              pointerType: 'touch',
            ))
        .toList();
  }
}
/// Test double for [PointerSupportDetector] that reports a hard-coded answer
/// for whether `PointerEvent`s are supported.
class MockPointerSupportDetector implements PointerSupportDetector {
  MockPointerSupportDetector(this.hasPointerEvents);

  @override
  final bool hasPointerEvents;
}
| engine/lib/web_ui/test/engine/pointer_binding_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/pointer_binding_test.dart",
"repo_id": "engine",
"token_count": 51458
} | 306 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:typed_data';
import 'package:test/test.dart';
import 'package:ui/src/engine/dom.dart';
import 'package:ui/src/engine/semantics.dart';
import 'package:ui/src/engine/vector_math.dart';
import 'package:ui/ui.dart' as ui;
import '../../common/matchers.dart';
/// CSS style applied to the root of the semantics tree.
///
/// The filter and transparent color render the semantics DOM invisible while
/// leaving it in the document.
// TODO(yjbanov): this should be handled internally by [expectSemanticsTree].
// No need for every test to inject it.
const String rootSemanticStyle = 'filter: opacity(0%); color: rgba(0, 0, 0, 0)';
/// A convenience wrapper of the semantics API for building and inspecting the
/// semantics tree in unit tests.
class SemanticsTester {
  SemanticsTester(this.owner);

  /// The semantics owner whose DOM tree is being built and inspected.
  final EngineSemanticsOwner owner;

  /// Node updates accumulated since the last call to [apply].
  final List<SemanticsNodeUpdate> _nodeUpdates = <SemanticsNodeUpdate>[];

  /// Updates one semantics node.
  ///
  /// Provides reasonable defaults for the missing attributes, and conveniences
  /// for specifying flags, such as [isTextField].
  ///
  /// Each `bool?` flag/action convenience, when true, ORs the corresponding
  /// [ui.SemanticsFlag]/[ui.SemanticsAction] bit into [flags]/[actions].
  SemanticsNodeUpdate updateNode({
    required int id,

    // Flags
    int flags = 0,
    bool? hasCheckedState,
    bool? isChecked,
    bool? isSelected,
    bool? isButton,
    bool? isLink,
    bool? isTextField,
    bool? isReadOnly,
    bool? isFocusable,
    bool? isFocused,
    bool? hasEnabledState,
    bool? isEnabled,
    bool? isInMutuallyExclusiveGroup,
    bool? isHeader,
    bool? isObscured,
    bool? scopesRoute,
    bool? namesRoute,
    bool? isHidden,
    bool? isImage,
    bool? isLiveRegion,
    bool? hasToggledState,
    bool? isToggled,
    bool? hasImplicitScrolling,
    bool? isMultiline,
    bool? isSlider,
    bool? isKeyboardKey,

    // Actions
    int actions = 0,
    bool? hasTap,
    bool? hasLongPress,
    bool? hasScrollLeft,
    bool? hasScrollRight,
    bool? hasScrollUp,
    bool? hasScrollDown,
    bool? hasIncrease,
    bool? hasDecrease,
    bool? hasShowOnScreen,
    bool? hasMoveCursorForwardByCharacter,
    bool? hasMoveCursorBackwardByCharacter,
    bool? hasSetSelection,
    bool? hasCopy,
    bool? hasCut,
    bool? hasPaste,
    bool? hasDidGainAccessibilityFocus,
    bool? hasDidLoseAccessibilityFocus,
    bool? hasCustomAction,
    bool? hasDismiss,
    bool? hasMoveCursorForwardByWord,
    bool? hasMoveCursorBackwardByWord,
    bool? hasSetText,

    // Other attributes
    int? maxValueLength,
    int? currentValueLength,
    int? textSelectionBase,
    int? textSelectionExtent,
    int? platformViewId,
    int? scrollChildren,
    int? scrollIndex,
    double? scrollPosition,
    double? scrollExtentMax,
    double? scrollExtentMin,
    double? elevation,
    double? thickness,
    ui.Rect? rect,
    String? identifier,
    String? label,
    List<ui.StringAttribute>? labelAttributes,
    String? hint,
    List<ui.StringAttribute>? hintAttributes,
    String? value,
    List<ui.StringAttribute>? valueAttributes,
    String? increasedValue,
    List<ui.StringAttribute>? increasedValueAttributes,
    String? decreasedValue,
    List<ui.StringAttribute>? decreasedValueAttributes,
    String? tooltip,
    ui.TextDirection? textDirection,
    Float64List? transform,
    Int32List? additionalActions,
    List<SemanticsNodeUpdate>? children,
  }) {
    // Flags
    if (hasCheckedState ?? false) {
      flags |= ui.SemanticsFlag.hasCheckedState.index;
    }
    if (isChecked ?? false) {
      flags |= ui.SemanticsFlag.isChecked.index;
    }
    if (isSelected ?? false) {
      flags |= ui.SemanticsFlag.isSelected.index;
    }
    if (isButton ?? false) {
      flags |= ui.SemanticsFlag.isButton.index;
    }
    if (isLink ?? false) {
      flags |= ui.SemanticsFlag.isLink.index;
    }
    if (isTextField ?? false) {
      flags |= ui.SemanticsFlag.isTextField.index;
    }
    if (isReadOnly ?? false) {
      flags |= ui.SemanticsFlag.isReadOnly.index;
    }
    if (isFocusable ?? false) {
      flags |= ui.SemanticsFlag.isFocusable.index;
    }
    if (isFocused ?? false) {
      flags |= ui.SemanticsFlag.isFocused.index;
    }
    if (hasEnabledState ?? false) {
      flags |= ui.SemanticsFlag.hasEnabledState.index;
    }
    if (isEnabled ?? false) {
      flags |= ui.SemanticsFlag.isEnabled.index;
    }
    if (isInMutuallyExclusiveGroup ?? false) {
      flags |= ui.SemanticsFlag.isInMutuallyExclusiveGroup.index;
    }
    if (isHeader ?? false) {
      flags |= ui.SemanticsFlag.isHeader.index;
    }
    if (isObscured ?? false) {
      flags |= ui.SemanticsFlag.isObscured.index;
    }
    if (scopesRoute ?? false) {
      flags |= ui.SemanticsFlag.scopesRoute.index;
    }
    if (namesRoute ?? false) {
      flags |= ui.SemanticsFlag.namesRoute.index;
    }
    if (isHidden ?? false) {
      flags |= ui.SemanticsFlag.isHidden.index;
    }
    if (isImage ?? false) {
      flags |= ui.SemanticsFlag.isImage.index;
    }
    if (isLiveRegion ?? false) {
      flags |= ui.SemanticsFlag.isLiveRegion.index;
    }
    if (hasToggledState ?? false) {
      flags |= ui.SemanticsFlag.hasToggledState.index;
    }
    if (isToggled ?? false) {
      flags |= ui.SemanticsFlag.isToggled.index;
    }
    if (hasImplicitScrolling ?? false) {
      flags |= ui.SemanticsFlag.hasImplicitScrolling.index;
    }
    if (isMultiline ?? false) {
      flags |= ui.SemanticsFlag.isMultiline.index;
    }
    if (isSlider ?? false) {
      flags |= ui.SemanticsFlag.isSlider.index;
    }
    if (isKeyboardKey ?? false) {
      flags |= ui.SemanticsFlag.isKeyboardKey.index;
    }

    // Actions
    if (hasTap ?? false) {
      actions |= ui.SemanticsAction.tap.index;
    }
    if (hasLongPress ?? false) {
      actions |= ui.SemanticsAction.longPress.index;
    }
    if (hasScrollLeft ?? false) {
      actions |= ui.SemanticsAction.scrollLeft.index;
    }
    if (hasScrollRight ?? false) {
      actions |= ui.SemanticsAction.scrollRight.index;
    }
    if (hasScrollUp ?? false) {
      actions |= ui.SemanticsAction.scrollUp.index;
    }
    if (hasScrollDown ?? false) {
      actions |= ui.SemanticsAction.scrollDown.index;
    }
    if (hasIncrease ?? false) {
      actions |= ui.SemanticsAction.increase.index;
    }
    if (hasDecrease ?? false) {
      actions |= ui.SemanticsAction.decrease.index;
    }
    if (hasShowOnScreen ?? false) {
      actions |= ui.SemanticsAction.showOnScreen.index;
    }
    if (hasMoveCursorForwardByCharacter ?? false) {
      actions |= ui.SemanticsAction.moveCursorForwardByCharacter.index;
    }
    if (hasMoveCursorBackwardByCharacter ?? false) {
      actions |= ui.SemanticsAction.moveCursorBackwardByCharacter.index;
    }
    if (hasSetSelection ?? false) {
      actions |= ui.SemanticsAction.setSelection.index;
    }
    if (hasCopy ?? false) {
      actions |= ui.SemanticsAction.copy.index;
    }
    if (hasCut ?? false) {
      actions |= ui.SemanticsAction.cut.index;
    }
    if (hasPaste ?? false) {
      actions |= ui.SemanticsAction.paste.index;
    }
    if (hasDidGainAccessibilityFocus ?? false) {
      actions |= ui.SemanticsAction.didGainAccessibilityFocus.index;
    }
    if (hasDidLoseAccessibilityFocus ?? false) {
      actions |= ui.SemanticsAction.didLoseAccessibilityFocus.index;
    }
    if (hasCustomAction ?? false) {
      actions |= ui.SemanticsAction.customAction.index;
    }
    if (hasDismiss ?? false) {
      actions |= ui.SemanticsAction.dismiss.index;
    }
    if (hasMoveCursorForwardByWord ?? false) {
      actions |= ui.SemanticsAction.moveCursorForwardByWord.index;
    }
    if (hasMoveCursorBackwardByWord ?? false) {
      actions |= ui.SemanticsAction.moveCursorBackwardByWord.index;
    }
    if (hasSetText ?? false) {
      actions |= ui.SemanticsAction.setText.index;
    }

    // Other attributes

    // Child rect in the parent's coordinate space.
    ui.Rect childRect(SemanticsNodeUpdate child) {
      return Matrix4.fromFloat32List(child.transform).transformRect(child.rect);
    }

    // If a rect is not provided, generate one that covers all children.
    //
    // Fix: only synthesize the rect from children when the caller did not
    // pass an explicit [rect]. Previously an explicitly provided rect was
    // silently discarded whenever children were present, contradicting the
    // documented intent.
    ui.Rect effectiveRect = rect ?? ui.Rect.zero;
    if (rect == null && children != null && children.isNotEmpty) {
      effectiveRect = childRect(children.first);
      for (final SemanticsNodeUpdate child in children.skip(1)) {
        effectiveRect = effectiveRect.expandToInclude(childRect(child));
      }
    }

    // The same child order is used for traversal and hit-testing.
    final Int32List childIds = Int32List(children?.length ?? 0);
    if (children != null) {
      for (int i = 0; i < children.length; i++) {
        childIds[i] = children[i].id;
      }
    }

    final SemanticsNodeUpdate update = SemanticsNodeUpdate(
      id: id,
      flags: flags,
      actions: actions,
      maxValueLength: maxValueLength ?? 0,
      currentValueLength: currentValueLength ?? 0,
      textSelectionBase: textSelectionBase ?? 0,
      textSelectionExtent: textSelectionExtent ?? 0,
      platformViewId: platformViewId ?? -1,
      scrollChildren: scrollChildren ?? 0,
      scrollIndex: scrollIndex ?? 0,
      scrollPosition: scrollPosition ?? 0,
      scrollExtentMax: scrollExtentMax ?? 0,
      scrollExtentMin: scrollExtentMin ?? 0,
      rect: effectiveRect,
      identifier: identifier ?? '',
      label: label ?? '',
      labelAttributes: labelAttributes ?? const <ui.StringAttribute>[],
      hint: hint ?? '',
      hintAttributes: hintAttributes ?? const <ui.StringAttribute>[],
      value: value ?? '',
      valueAttributes: valueAttributes ?? const <ui.StringAttribute>[],
      increasedValue: increasedValue ?? '',
      increasedValueAttributes: increasedValueAttributes ?? const <ui.StringAttribute>[],
      decreasedValue: decreasedValue ?? '',
      decreasedValueAttributes: decreasedValueAttributes ?? const <ui.StringAttribute>[],
      tooltip: tooltip ?? '',
      transform: transform != null ? toMatrix32(transform) : Matrix4.identity().storage,
      elevation: elevation ?? 0,
      thickness: thickness ?? 0,
      childrenInTraversalOrder: childIds,
      childrenInHitTestOrder: childIds,
      additionalActions: additionalActions ?? Int32List(0),
    );
    _nodeUpdates.add(update);
    return update;
  }

  /// Updates the HTML tree from semantics updates accumulated by this builder.
  ///
  /// This builder forgets previous updates and may be reused in future updates.
  Map<int, SemanticsObject> apply() {
    owner.updateSemantics(SemanticsUpdate(nodeUpdates: _nodeUpdates));
    _nodeUpdates.clear();
    return owner.debugSemanticsTree!;
  }

  /// Locates the semantics object with the given [id].
  SemanticsObject getSemanticsObject(int id) {
    return owner.debugSemanticsTree![id]!;
  }

  /// Locates the [TextField] role manager of the semantics object with the
  /// given [id].
  TextField getTextField(int id) {
    return getSemanticsObject(id).primaryRole! as TextField;
  }

  /// Asserts that the current semantics tree renders [semanticsHtml].
  void expectSemantics(String semanticsHtml) {
    expectSemanticsTree(owner, semanticsHtml);
  }
}
/// Verifies the HTML structure of the current semantics tree.
/// Verifies the HTML structure of the current semantics tree.
///
/// Canonicalizes the outer HTML of the `flt-semantics` root under [owner]'s
/// semantics host and compares it to the canonicalized [semanticsHtml]. The
/// `pointer-events` style property is excluded from the comparison.
void expectSemanticsTree(EngineSemanticsOwner owner, String semanticsHtml) {
  const List<String> ignoredStyleProperties = <String>['pointer-events'];
  expect(
    canonicalizeHtml(owner.semanticsHost.querySelector('flt-semantics')!.outerHTML!, ignoredStyleProperties: ignoredStyleProperties),
    canonicalizeHtml(semanticsHtml),
  );
}
/// Finds the first HTML element in the semantics tree used for scrolling.
/// Finds the single HTML element in the semantics tree used for scrolling.
///
/// An element counts as scrollable when its overflow is 'hidden' or either
/// axis overflow is 'scroll'. Because [Iterable.singleWhere] is used, this
/// throws if no such element exists or if more than one matches.
DomElement findScrollable(EngineSemanticsOwner owner) {
  return owner.semanticsHost.querySelectorAll('flt-semantics').singleWhere(
    (DomElement? element) {
      return element!.style.overflow == 'hidden' ||
          element.style.overflowY == 'scroll' ||
          element.style.overflowX == 'scroll';
    },
  );
}
/// Logs semantics actions dispatched to [ui.PlatformDispatcher].
/// Logs semantics actions dispatched to [ui.PlatformDispatcher].
///
/// Installing an instance replaces the global `onSemanticsActionEvent`
/// handler; the node ids and action types of dispatched events are exposed as
/// broadcast streams.
class SemanticsActionLogger {
  SemanticsActionLogger() {
    _idLogController = StreamController<int>();
    _actionLogController = StreamController<ui.SemanticsAction>();
    // Broadcast streams are created eagerly so events dispatched before a
    // listener attaches still flow through the broadcast machinery.
    _idLog = _idLogController.stream.asBroadcastStream();
    _actionLog = _actionLogController.stream.asBroadcastStream();

    // The browser kicks us out of the test zone when the browser event happens.
    // We memorize the test zone so we can call expect when the callback is
    // fired.
    final Zone testZone = Zone.current;

    ui.PlatformDispatcher.instance.onSemanticsActionEvent =
        (ui.SemanticsActionEvent event) {
      _idLogController.add(event.nodeId);
      _actionLogController.add(event.type);
      testZone.run(() {
        // These tests expect actions without arguments; running the
        // expectation in the test zone reports failures properly.
        expect(event.arguments, null);
      });
    };
  }

  // Backing controllers feeding the broadcast streams below.
  late StreamController<int> _idLogController;
  late StreamController<ui.SemanticsAction> _actionLogController;

  /// Semantics object ids that dispatched the actions.
  Stream<int> get idLog => _idLog;
  late Stream<int> _idLog;

  /// The actions that were dispatched to [ui.PlatformDispatcher].
  Stream<ui.SemanticsAction> get actionLog => _actionLog;
  late Stream<ui.SemanticsAction> _actionLog;
}
| engine/lib/web_ui/test/engine/semantics/semantics_tester.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/semantics/semantics_tester.dart",
"repo_id": "engine",
"token_count": 4876
} | 307 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
// Entry point: bootstraps the browser test harness and runs [doTests].
void main() {
  internalBootstrapBrowserTest(() => doTests);
}
/// Tests for the [DimensionsProvider] factory: the concrete implementation is
/// selected based on whether a host element is supplied.
void doTests() {
  group('Factory', () {
    test('Creates a FullPage instance when hostElement is null', () async {
      expect(DimensionsProvider.create(), isA<FullPageDimensionsProvider>());
    });

    test('Creates a CustomElement instance when hostElement is not null',
        () async {
      final DomElement host = createDomElement('some-random-element');
      expect(
        DimensionsProvider.create(hostElement: host),
        isA<CustomElementDimensionsProvider>(),
      );
    });
  });
}
| engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/dimensions_provider_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/dimensions_provider_test.dart",
"repo_id": "engine",
"token_count": 302
} | 308 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_util' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart' as engine;
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide TextStyle;
import '../common/test_initialization.dart';
import 'screenshot.dart';
// Entry point: bootstraps the browser test harness and runs [testMain].
void main() {
  internalBootstrapBrowserTest(() => testMain);
}
/// Golden tests covering clipping of images and paths by oval/triangle/evenOdd
/// clip paths. Each test records draw commands into a [engine.RecordingCanvas]
/// and compares the result to a named golden screenshot.
Future<void> testMain() async {
  setUpUnitTests(
    setUpTestViewDimensions: false,
  );

  // Regression test for https://github.com/flutter/flutter/issues/48683
  // Should clip image with oval.
  test('Clips image with oval clip path', () async {
    final engine.RecordingCanvas rc =
        engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    final Path path = Path();
    path.addOval(Rect.fromLTWH(100, 30, testWidth, testHeight));
    rc.clipPath(path);
    // Draw the full test image into the same rect as the oval clip.
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTWH(100, 30, testWidth, testHeight), engine.SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'image_clipped_by_oval');
  });

  // Regression test for https://github.com/flutter/flutter/issues/48683
  test('Clips triangle with oval clip path', () async {
    final engine.RecordingCanvas rc =
        engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
    rc.save();
    const double testWidth = 200;
    const double testHeight = 150;
    final Path path = Path();
    path.addOval(const Rect.fromLTWH(100, 30, testWidth, testHeight));
    rc.clipPath(path);
    // A green triangle spanning the test area; only the part inside the oval
    // clip should be visible.
    final Path paintPath = Path();
    paintPath.moveTo(testWidth / 2, 0);
    paintPath.lineTo(testWidth, testHeight);
    paintPath.lineTo(0, testHeight);
    paintPath.close();
    rc.drawPath(
        paintPath,
        engine.SurfacePaint()
          ..color = const Color(0xFF00FF00)
          ..style = PaintingStyle.fill);
    rc.restore();
    await canvasScreenshot(rc, 'triangle_clipped_by_oval');
  });

  // Regression test for https://github.com/flutter/flutter/issues/78782
  test('Clips on Safari when clip bounds off screen', () async {
    final engine.RecordingCanvas rc =
        engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
    rc.save();
    const double testWidth = 200;
    const double testHeight = 150;
    // Outline of the image's destination rect, partially off screen.
    final Path paintPath = Path();
    paintPath.addRect(const Rect.fromLTWH(-50, 0, testWidth, testHeight));
    paintPath.close();
    rc.drawPath(paintPath,
        engine.SurfacePaint()
          ..color = const Color(0xFF000000)
          ..style = PaintingStyle.stroke);
    // A triangle clip whose bounds extend off the left edge of the screen.
    final Path path = Path();
    path.moveTo(-200, 0);
    path.lineTo(100, 75);
    path.lineTo(-200, 150);
    path.close();
    rc.clipPath(path);
    rc.drawImageRect(createTestImage(), const Rect.fromLTRB(0, 0, testWidth, testHeight),
        const Rect.fromLTWH(-50, 0, testWidth, testHeight), engine.SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'image_clipped_by_triangle_off_screen',
        region: const Rect.fromLTWH(0, 0, 600, 800));
  });

  // Tests oval clipping using border radius 50%.
  test('Clips against oval', () async {
    final engine.RecordingCanvas rc =
        engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
    rc.save();
    const double testWidth = 200;
    const double testHeight = 150;
    // Outline of the image's destination rect.
    final Path paintPath = Path();
    paintPath.addRect(const Rect.fromLTWH(-50, 0, testWidth, testHeight));
    paintPath.close();
    rc.drawPath(paintPath,
        engine.SurfacePaint()
          ..color = const Color(0xFF000000)
          ..style = PaintingStyle.stroke);
    // Oval clip with bounds partially off screen.
    final Path path = Path();
    path.addOval(const Rect.fromLTRB(-200, 0, 100, 150));
    rc.clipPath(path);
    rc.drawImageRect(createTestImage(), const Rect.fromLTRB(0, 0, testWidth, testHeight),
        const Rect.fromLTWH(-50, 0, testWidth, testHeight), engine.SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'image_clipped_by_oval_path',
        region: const Rect.fromLTWH(0, 0, 600, 800));
  });

  test('Clips with fillType evenOdd', () async {
    final engine.RecordingCanvas rc = engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
    rc.save();
    const double testWidth = 400;
    const double testHeight = 350;
    // draw RGB test image
    rc.drawImageRect(createTestImage(), const Rect.fromLTRB(0, 0, testWidth, testHeight),
        const Rect.fromLTWH(0, 0, testWidth, testHeight), engine.SurfacePaint());
    // draw a clipping path with:
    // 1) an outside larger rectangle
    // 2) a smaller inner rectangle specified by a path
    final Path path = Path();
    path.addRect(const Rect.fromLTWH(0, 0, testWidth, testHeight));
    const double left = 25;
    const double top = 30;
    const double right = 300;
    const double bottom = 250;
    path
      ..moveTo(left, top)
      ..lineTo(right,top)
      ..lineTo(right,bottom)
      ..lineTo(left, bottom)
      ..close();
    path.fillType = PathFillType.evenOdd;
    rc.clipPath(path);
    // draw an orange paint path of size testWidth and testHeight
    final Path paintPath = Path();
    paintPath.addRect(const Rect.fromLTWH(0, 0, testWidth, testHeight));
    paintPath.close();
    rc.drawPath(paintPath,
        engine.SurfacePaint()
          ..color = const Color(0xFFFF9800)
          ..style = PaintingStyle.fill);
    rc.restore();
    // when fillType is set to evenOdd from the clipping path, expect the inner
    // rectangle should clip some of the orange painted portion, revealing the RGB testImage
    await canvasScreenshot(rc, 'clipPath_uses_fillType_evenOdd',
        region: const Rect.fromLTWH(0, 0, 600, 800));
  });
}
/// Creates an [engine.HtmlImage] consisting of three equal-width vertical
/// stripes (red, green, blue) painted onto an off-screen canvas.
engine.HtmlImage createTestImage({int width = 200, int height = 150}) {
  final engine.DomCanvasElement canvas =
      engine.createDomCanvasElement(width: width, height: height);
  final engine.DomCanvasRenderingContext2D context = canvas.context2D;
  // Left-to-right stripe colors: red, green, blue.
  const List<String> stripeColors = <String>['#E04040', '#40E080', '#2040E0'];
  for (int i = 0; i < stripeColors.length; i++) {
    context.fillStyle = stripeColors[i];
    context.fillRect(i * width / 3, 0, width / 3, height);
    context.fill();
  }
  final engine.DomHTMLImageElement imageElement = engine.createDomHTMLImageElement();
  // Encode the canvas contents as a data URL for the image element.
  imageElement.src = js_util.callMethod<String>(canvas, 'toDataURL', <dynamic>[]);
  return engine.HtmlImage(imageElement, width, height);
}
| engine/lib/web_ui/test/html/canvas_clip_path_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/canvas_clip_path_golden_test.dart",
"repo_id": "engine",
"token_count": 2502
} | 309 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'dart:js_util' as js_util;
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import 'package:ui/ui_web/src/ui_web.dart' as ui_web;
import 'package:web_engine_tester/golden_tester.dart';
import '../../common/test_initialization.dart';
import '../screenshot.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
// Golden tests covering image drawing on the HTML (DOM-based) rendering
// backend: plain draws, transforms, source/destination rects, clipping,
// composite order relative to other primitives, nine-slice drawing, and
// perspective transforms.
Future<void> testMain() async {
  setUpUnitTests(
    withImplicitView: true,
    setUpTestViewDimensions: false,
  );
  test('Paints image', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    rc.drawImage(createTestImage(), Offset.zero, SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image');
  });
  test('Images from raw data are composited when picture is roundtripped through toImage', () async {
    final Uint8List imageData = base64Decode(base64PngData);
    final Codec codec = await instantiateImageCodec(imageData);
    final FrameInfo frameInfo = await codec.getNextFrame();
    const Rect bounds = Rect.fromLTRB(0, 0, 400, 300);
    final EnginePictureRecorder recorder = EnginePictureRecorder();
    final RecordingCanvas scratchCanvas = recorder.beginRecording(bounds);
    scratchCanvas.save();
    scratchCanvas.drawImage(frameInfo.image, Offset.zero, SurfacePaint());
    scratchCanvas.restore();
    final Picture picture = recorder.endRecording();
    final Image image = await picture.toImage(400, 300);
    final RecordingCanvas rc = RecordingCanvas(bounds);
    rc.save();
    rc.drawImage(image, Offset.zero, SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_raw_image');
  });
  test('Paints image with transform', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    rc.translate(50.0, 100.0);
    rc.rotate(math.pi / 4.0);
    rc.drawImage(createTestImage(), Offset.zero, SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_with_transform');
  });
  test('Paints image with transform and offset', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    rc.translate(50.0, 100.0);
    rc.rotate(math.pi / 4.0);
    rc.drawImage(createTestImage(), const Offset(30, 20), SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_with_transform_and_offset');
  });
  test('Paints image with transform using destination', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    rc.translate(50.0, 100.0);
    rc.rotate(math.pi / 4.0);
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_rect_with_transform');
  });
  test('Paints image with source and destination', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.drawImageRect(
        testImage,
        Rect.fromLTRB(testWidth / 2, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight),
        SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_rect_with_source');
  });
  test('Paints image with source and destination and round clip', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.save();
    rc.clipRRect(RRect.fromLTRBR(
        100, 30, 2 * testWidth, 2 * testHeight, const Radius.circular(16)));
    rc.drawImageRect(
        testImage,
        Rect.fromLTRB(testWidth / 2, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight),
        SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_rect_with_source_and_clip');
  });
  test('Paints image with transform using source and destination', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    rc.translate(50.0, 100.0);
    rc.rotate(math.pi / 6.0);
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.drawImageRect(
        testImage,
        Rect.fromLTRB(testWidth / 2, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight),
        SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_image_rect_with_transform_source');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image not below.
  test('Paints on top of image', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should be below the image, not on top.
  test('Paints below image', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_below_image');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image with clip rect.
  test('Paints on top of image with clip rect', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.clipRect(const Rect.fromLTRB(75, 75, 160, 160), ClipOp.intersect);
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image_clip_rect');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image with clip rect and transform.
  test('Paints on top of image with clip rect with transform', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    // Rotate around center of circle.
    rc.translate(100, 100);
    rc.rotate(math.pi / 4.0);
    rc.translate(-100, -100);
    rc.clipRect(const Rect.fromLTRB(75, 75, 160, 160), ClipOp.intersect);
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image_clip_rect_with_transform');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image with stack of clip rect and transforms.
  test('Paints on top of image with clip rect with stack', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    // Rotate around center of circle.
    rc.translate(100, 100);
    rc.rotate(-math.pi / 4.0);
    rc.save();
    rc.translate(-100, -100);
    rc.clipRect(const Rect.fromLTRB(75, 75, 160, 160), ClipOp.intersect);
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image_clip_rect_with_stack');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image with clip rrect.
  test('Paints on top of image with clip rrect', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    rc.clipRRect(RRect.fromLTRBR(75, 75, 160, 160, const Radius.circular(5)));
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image_clip_rrect');
  });
  // Regression test for https://github.com/flutter/flutter/issues/44845
  // Circle should draw on top of image with clip path.
  test('Paints on top of image with clip path', () async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    final Path path = Path();
    // Triangle.
    path.moveTo(118, 57);
    path.lineTo(75, 160);
    path.lineTo(160, 160);
    rc.clipPath(path);
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        Rect.fromLTRB(100, 30, 2 * testWidth, 2 * testHeight), SurfacePaint());
    rc.drawCircle(
        const Offset(100, 100),
        50.0,
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color.fromARGB(128, 0, 0, 0));
    rc.restore();
    await canvasScreenshot(rc, 'draw_circle_on_image_clip_path');
  });
  // Regression test for https://github.com/flutter/flutter/issues/53078
  // Verified that Text+Image+Text+Rect+Text composites correctly.
  // Yellow text should be behind image and rectangle.
  // Cyan text should be above everything.
  test('Paints text above and below image', () async {
    // Use a non-Ahem font so that text is visible.
    final bool wasEmulatingTesterEnvironment =
        ui_web.debugEmulateFlutterTesterEnvironment;
    ui_web.debugEmulateFlutterTesterEnvironment = false;
    // Restore the flag so this test does not leak state into later tests.
    addTearDown(() {
      ui_web.debugEmulateFlutterTesterEnvironment =
          wasEmulatingTesterEnvironment;
    });
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
    rc.save();
    final Image testImage = createTestImage();
    final double testWidth = testImage.width.toDouble();
    final double testHeight = testImage.height.toDouble();
    const Color orange = Color(0xFFFF9800);
    final Paragraph paragraph1 = createTestParagraph(
        'Should be below below below below below',
        color: orange);
    paragraph1.layout(const ParagraphConstraints(width: 400.0));
    rc.drawParagraph(paragraph1, const Offset(20, 100));
    rc.drawImageRect(testImage, Rect.fromLTRB(0, 0, testWidth, testHeight),
        const Rect.fromLTRB(100, 100, 200, 200), SurfacePaint());
    rc.drawRect(
        const Rect.fromLTWH(50, 50, 100, 200),
        SurfacePaint()
          ..strokeWidth = 3
          ..color = const Color(0xA0000000));
    const Color cyan = Color(0xFF0097A7);
    final Paragraph paragraph2 = createTestParagraph(
        'Should be above above above above above',
        color: cyan);
    paragraph2.layout(const ParagraphConstraints(width: 400.0));
    rc.drawParagraph(paragraph2, const Offset(20, 150));
    rc.restore();
    await canvasScreenshot(
      rc,
      'draw_text_composite_order_below',
      region: const Rect.fromLTWH(0, 0, 350, 300),
    );
  });
  // Draws a nine-slice image through the Canvas/Picture pipeline and
  // composites the resulting picture into the scene.
  test('Paints nine slice image', () async {
    const Rect region = Rect.fromLTWH(0, 0, 500, 500);
    final EnginePictureRecorder recorder = EnginePictureRecorder();
    final Canvas canvas = Canvas(recorder, region);
    final Image testImage = createNineSliceImage();
    canvas.clipRect(const Rect.fromLTWH(0, 0, 420, 200));
    canvas.drawImageNine(testImage, const Rect.fromLTWH(20, 20, 20, 20),
        const Rect.fromLTWH(20, 20, 400, 400), SurfacePaint());
    final Picture picture = recorder.endRecording();
    final SurfaceSceneBuilder builder = SurfaceSceneBuilder();
    builder.addPicture(Offset.zero, picture);
    // Wrap in <flt-scene> so that our CSS selectors kick in.
    final DomElement sceneElement = createDomElement('flt-scene');
    if (isIosSafari) {
      // Shrink to fit on the iPhone screen.
      sceneElement.style.position = 'absolute';
      sceneElement.style.transformOrigin = '0 0 0';
      sceneElement.style.transform = 'scale(0.3)';
    }
    try {
      sceneElement.append(builder.build().webOnlyRootElement!);
      domDocument.body!.append(sceneElement);
      await matchGoldenFile('draw_nine_slice.png',
          region: region);
    } finally {
      // The page is reused across tests, so remove the element after taking the
      // screenshot.
      sceneElement.remove();
    }
  });
  // Regression test for https://github.com/flutter/flutter/issues/78068
  // Tests for correct behavior when using drawImageNine with a destination
  // size that is too small to render the center portion of the original image.
  test('Paints nine slice image with empty center', () async {
    const Rect region = Rect.fromLTWH(0, 0, 100, 100);
    final EnginePictureRecorder recorder = EnginePictureRecorder();
    final Canvas canvas = Canvas(recorder, region);
    final Image testImage = createNineSliceImage();
    canvas.clipRect(const Rect.fromLTWH(0, 0, 100, 100));
    // The testImage is 60x60 and the center slice is 20x20 so the edges
    // of the image are 40x40. Drawing into a destination that is smaller
    // than that will not provide enough room to draw the center portion.
    canvas.drawImageNine(testImage, const Rect.fromLTWH(20, 20, 20, 20),
        const Rect.fromLTWH(20, 20, 36, 36), SurfacePaint());
    final Picture picture = recorder.endRecording();
    final SurfaceSceneBuilder builder = SurfaceSceneBuilder();
    builder.addPicture(Offset.zero, picture);
    // Wrap in <flt-scene> so that our CSS selectors kick in.
    final DomElement sceneElement = createDomElement('flt-scene');
    if (isIosSafari) {
      // Shrink to fit on the iPhone screen.
      sceneElement.style.position = 'absolute';
      sceneElement.style.transformOrigin = '0 0 0';
      sceneElement.style.transform = 'scale(0.3)';
    }
    try {
      sceneElement.append(builder.build().webOnlyRootElement!);
      domDocument.body!.append(sceneElement);
      await matchGoldenFile('draw_nine_slice_empty_center.png',
          region: region);
    } finally {
      // The page is reused across tests, so remove the element after taking the
      // screenshot.
      sceneElement.remove();
    }
  });
  // Regression test for https://github.com/flutter/flutter/issues/61691
  //
  // The bug in bitmap_canvas.dart was that when we transformed and clipped
  // the image we did not apply `transform-origin: 0 0 0` to the clipping
  // element which resulted in an undesirable offset.
  test('Paints clipped and transformed image', () async {
    const Rect region = Rect.fromLTRB(0, 0, 60, 70);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.translate(10, 10);
    canvas.transform(Matrix4.rotationZ(0.4).storage);
    canvas.clipPath(Path()
      ..moveTo(10, 10)
      ..lineTo(50, 10)
      ..lineTo(50, 30)
      ..lineTo(10, 30)
      ..close());
    canvas.drawImage(createNineSliceImage(), Offset.zero, SurfacePaint());
    await canvasScreenshot(canvas, 'draw_clipped_and_transformed_image',
        region: region);
  });
  /// Regression test for https://github.com/flutter/flutter/issues/61245
  test('Should render image with perspective', () async {
    const Rect region = Rect.fromLTRB(0, 0, 200, 200);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.translate(10, 10);
    canvas.drawImage(createTestImage(), Offset.zero, SurfacePaint());
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.0005); // perspective
    canvas.transform(transform.storage);
    canvas.drawImage(createTestImage(), const Offset(0, 100), SurfacePaint());
    await canvasScreenshot(canvas, 'draw_3d_image',
        region: region,
        setupPerspective: true);
  });
  /// Regression test for https://github.com/flutter/flutter/issues/61245
  test('Should render image with perspective inside clip area', () async {
    const Rect region = Rect.fromLTRB(0, 0, 200, 200);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.drawRect(region, SurfacePaint()..color = const Color(0xFFE0E0E0));
    canvas.translate(10, 10);
    canvas.drawImage(createTestImage(), Offset.zero, SurfacePaint());
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.0005); // perspective
    canvas.transform(transform.storage);
    canvas.clipRect(region, ClipOp.intersect);
    canvas.drawRect(const Rect.fromLTWH(0, 0, 100, 200), SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawImage(createTestImage(), const Offset(0, 100), SurfacePaint());
    canvas.drawRect(const Rect.fromLTWH(50, 150, 50, 20), SurfacePaint()..color = const Color(0x80000000));
    await canvasScreenshot(canvas, 'draw_3d_image_clipped',
        region: region,
        setupPerspective: true);
  });
  test('Should render rect with perspective transform', () async {
    const Rect region = Rect.fromLTRB(0, 0, 400, 400);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.drawRect(region, SurfacePaint()..color = const Color(0xFFE0E0E0));
    canvas.translate(20, 20);
    canvas.drawRect(const Rect.fromLTWH(0, 0, 100, 40),
        SurfacePaint()..color = const Color(0xFF000000));
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.001); // perspective
    canvas.transform(transform.storage);
    canvas.clipRect(region, ClipOp.intersect);
    canvas.drawRect(const Rect.fromLTWH(0, 60, 120, 40), SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawRect(const Rect.fromLTWH(300, 250, 120, 40), SurfacePaint()..color = const Color(0x80E010E0));
    canvas.drawRRect(RRect.fromRectAndRadius(const Rect.fromLTWH(0, 120, 160, 40), const Radius.circular(5)),
        SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawRRect(RRect.fromRectAndRadius(const Rect.fromLTWH(300, 320, 90, 40), const Radius.circular(20)),
        SurfacePaint()..color = const Color(0x80E010E0));
    await canvasScreenshot(canvas, 'draw_3d_rect_clipped',
        region: region,
        setupPerspective: true);
  });
  test('Should render color and ovals with perspective transform', () async {
    const Rect region = Rect.fromLTRB(0, 0, 400, 400);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.drawRect(region, SurfacePaint()..color = const Color(0xFFFF0000));
    canvas.drawColor(const Color(0xFFE0E0E0), BlendMode.src);
    canvas.translate(20, 20);
    canvas.drawRect(const Rect.fromLTWH(0, 0, 100, 40),
        SurfacePaint()..color = const Color(0xFF000000));
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.001); // perspective
    canvas.transform(transform.storage);
    canvas.clipRect(region, ClipOp.intersect);
    canvas.drawOval(const Rect.fromLTWH(0, 120, 130, 40),
        SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawOval(const Rect.fromLTWH(300, 290, 90, 40),
        SurfacePaint()..color = const Color(0x80E010E0));
    canvas.drawCircle(const Offset(60, 240), 50, SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawCircle(const Offset(360, 370), 30, SurfacePaint()..color = const Color(0x80E010E0));
    await canvasScreenshot(canvas, 'draw_3d_oval_clipped',
        region: region,
        setupPerspective: true);
  });
  test('Should render path with perspective transform', () async {
    const Rect region = Rect.fromLTRB(0, 0, 400, 400);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.drawRect(region, SurfacePaint()..color = const Color(0xFFFF0000));
    canvas.drawColor(const Color(0xFFE0E0E0), BlendMode.src);
    canvas.translate(20, 20);
    canvas.drawRect(const Rect.fromLTWH(0, 0, 100, 20),
        SurfacePaint()..color = const Color(0xFF000000));
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.001); // perspective
    canvas.transform(transform.storage);
    canvas.drawRect(const Rect.fromLTWH(0, 120, 130, 40),
        SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawOval(const Rect.fromLTWH(300, 290, 90, 40),
        SurfacePaint()..color = const Color(0x80E010E0));
    final Path path = Path();
    path.moveTo(50, 50);
    path.lineTo(100, 50);
    path.lineTo(100, 100);
    path.close();
    canvas.drawPath(path, SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawCircle(const Offset(50, 50), 4, SurfacePaint()..color = const Color(0xFF000000));
    canvas.drawCircle(const Offset(100, 100), 4, SurfacePaint()..color = const Color(0xFF000000));
    canvas.drawCircle(const Offset(100, 50), 4, SurfacePaint()..color = const Color(0xFF000000));
    await canvasScreenshot(canvas, 'draw_3d_path',
        region: region,
        setupPerspective: true);
  });
  // Same scene as the previous test but validated against the "clipped"
  // golden. NOTE(review): the clipRect below is commented out, so the
  // scene is currently not clipped even though the golden name says so.
  // The test name was made unique to avoid a duplicate test description.
  test('Should render path with perspective transform without clip', () async {
    const Rect region = Rect.fromLTRB(0, 0, 400, 400);
    final RecordingCanvas canvas = RecordingCanvas(region);
    canvas.drawRect(region, SurfacePaint()..color = const Color(0xFFFF0000));
    canvas.drawColor(const Color(0xFFE0E0E0), BlendMode.src);
    canvas.translate(20, 20);
    canvas.drawRect(const Rect.fromLTWH(0, 0, 100, 20),
        SurfacePaint()..color = const Color(0xFF000000));
    final Matrix4 transform = Matrix4.identity()
      ..setRotationY(0.8)
      ..setEntry(3, 2, 0.001); // perspective
    canvas.transform(transform.storage);
    //canvas.clipRect(region, ClipOp.intersect);
    canvas.drawRect(const Rect.fromLTWH(0, 120, 130, 40),
        SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawOval(const Rect.fromLTWH(300, 290, 90, 40),
        SurfacePaint()..color = const Color(0x80E010E0));
    final Path path = Path();
    path.moveTo(50, 50);
    path.lineTo(100, 50);
    path.lineTo(100, 100);
    path.close();
    canvas.drawPath(path, SurfacePaint()..color = const Color(0x801080E0));
    canvas.drawCircle(const Offset(50, 50), 4, SurfacePaint()..color = const Color(0xFF000000));
    canvas.drawCircle(const Offset(100, 100), 4, SurfacePaint()..color = const Color(0xFF000000));
    canvas.drawCircle(const Offset(100, 50), 4, SurfacePaint()..color = const Color(0xFF000000));
    await canvasScreenshot(canvas, 'draw_3d_path_clipped',
        region: region,
        setupPerspective: true);
  });
}
// Base64-encoded 60x60 PNG used as the 9-slice test image; it has a
// shiny/glass look. Decoded by createNineSliceImage() below.
const String base64PngData = 'iVBORw0KGgoAAAANSUh'
'EUgAAADwAAAA8CAYAAAA6/NlyAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPo'
'AAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAApGVYSWZNTQAqAAAACAAFARIAAwAAAAEAAQA'
'AARoABQAAAAEAAABKARsABQAAAAEAAABSATEAAgAAACAAAABah2kABAAAAAEAAAB6AAAAAAAA'
'AEgAAAABAAAASAAAAAFBZG9iZSBQaG90b3Nob3AgQ1M2IChNYWNpbnRvc2gpAAADoAEAAwAA'
'AAEAAQAAoAIABAAAAAEAAAA8oAMABAAAAAEAAAA8AAAAAKgRPeEAAAAJcEhZcwAACxMAAAs'
'TAQCanBgAAATqaVRYdFhNTDpjb20uYWRvYmUueG1wAAAAAAA8eDp4bXBtZXRhIHhtbG5zOn'
'g9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA1LjQuMCI+CiAgIDxyZGY6Uk'
'RGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW'
'5zIyI+CiAgICAgIDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiCiAgICAgICAgICAg'
'IHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIgogICAgICA'
'gICAgICB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1J'
'lc291cmNlUmVmIyIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlL'
'mNvbS9leGlmLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2'
'JlLmNvbS94YXAvMS4wLyIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmF'
'kb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8eG1wTU06SW5zdGFuY2VJRD54bXAua'
'WlkOjMxRTc0MTc5ODQwQTExRUE5OEU4QUI4OTRCMjhDRUE3PC94bXBNTTpJbnN0YW5jZUl'
'EPgogICAgICAgICA8eG1wTU06RG9jdW1lbnRJRD54bXAuZGlkOjMxRTc0MTdBODQwQTExR'
'UE5OEU4QUI4OTRCMjhDRUE3PC94bXBNTTpEb2N1bWVudElEPgogICAgICAgICA8eG1wTU0'
'6RGVyaXZlZEZyb20gcmRmOnBhcnNlVHlwZT0iUmVzb3VyY2UiPgogICAgICAgICAgICA8c'
'3RSZWY6aW5zdGFuY2VJRD54bXAuZGlkOjAxODAxMTc0MDcyMDY4MTE4MjJBQUI1NDhBQTA'
'zMDNBPC9zdFJlZjppbnN0YW5jZUlEPgogICAgICAgICAgICA8c3RSZWY6ZG9jdW1lbnRJR'
'D54bXAuZGlkOjAxODAxMTc0MDcyMDY4MTE4MjJBQUI1NDhBQTAzMDNBPC9zdFJlZjpkb2N'
'1bWVudElEPgogICAgICAgICA8L3htcE1NOkRlcml2ZWRGcm9tPgogICAgICAgICA8ZXhpZ'
'jpQaXhlbFlEaW1lbnNpb24+NjA8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8'
'ZXhpZjpDb2xvclNwYWNlPjE8L2V4aWY6Q29sb3JTcGFjZT4KICAgICAgICAgPGV4aWY6UGl'
'4ZWxYRGltZW5zaW9uPjYwPC9leGlmOlBpeGVsWERpbWVuc2lvbj4KICAgICAgICAgPHhtcD'
'pDcmVhdG9yVG9vbD5BZG9iZSBQaG90b3Nob3AgQ1M2IChNYWNpbnRvc2gpPC94bXA6Q3Jl'
'YXRvclRvb2w+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YX'
'Rpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZ'
'XRhPgpq1fpCAAAUDUlEQVRoBd1beYxd11n/3fvu2+fNm9VbPB6PHS+ldt0kNKWJk0YkaVJS'
'UtRQhFCqSggRCVWgSKUKEgi1FNSKlvIH/1dVoWqatBKkUYqgAVRaNSTN0jg2buIkE4/XmbF'
'ne/O2u/D7ffedN2/GM9hJCRAf+76z3LN8+/nuOd94jUYjwRWmJFnp6nkeVI+jCJ7vw+Mc9l'
'Z9+M7qLK+MYHPPOmrvjulp17yunxvr+inXWmvh6Bl+2WJw2R7qwJUFehLHiIUEH58LWxIAa'
'RerOoBi1Qi8S71AujaXW69O314k3XvXprURhrZ+ijyJ45HYIlLPWm7cevkVIUw+GieThFzN'
'pAt0EUj0LiYQRNFBxnISs2L/00YRa6NkwHYAdsCLoBqpx7V5WjuTWZEMrat5faIvBgjxjRb'
'ptG+IsKjJ6ToU5UTZLFdPENaXUL+4gNqFi1iuMT8/jcb8AtqkfBxHQNREVG8CzTaLbbaJGJ'
'KKlCApQCkRCCHXELycvyMx2VyWawUI8jnEhQIJnEEuk0U2F6AyOopCdQil/ipKQwMsV+GrH'
'9UqabdJjAAZzuWLABukdREWgB5hSjSRR4py0fq5aUwdfxFTzzyDsz9+FrOPfR9NLIIoot2Z'
'XMsIAeUSeL1zybWpvaMMxr2QdY1RUh89SkYI5qobl5ln+JDsKO89hNFbb8Tm91+P7e+9Hps'
'm9qBQLiNutZAE0vFUCtn1kuStZ7SEcGgUyyBi/tpTT+HFb3wTk1/7KuqcQgBnc2UEWzfD6+'
'9HJiBlCZokQhSWCIbkaIbzrKRU3AwBa+6gwQaSl91Y55iI3JFeKgkOgo6oIyVeo4XW0hzCU1'
'MkdkqUYQxj3xc+jet+5R4M7tlt3M6SST6ZtF5ajTAXSAhs1A4RZTy0KarP/O3f4Sd/+Gksc3'
'Rx/FrkByrItglio4FQFK0vI9PiuM4/P6KeCxjqcIa8T/wM5yTYFAOhLNSka8SG/YigyRgBZ'
'LNPmfBi9ucEIl7skf8Udb2LcszzeVI6i7DAvEBRZvvykRfQYr7pPdfjji98Hrs+eJidfWS'
'yGseRtiA7dNIqhEXRiIaACoX6wgU8+eW/xgtf+kuUt4wjPzyIzMISsFgzg5TJS9cyCKg3YC'
'6xFzdowmwNQ81LBVPlkDoiQiipn9W4XtBtVRvnYJtgpDLxl/NJ9yU1VK+wSQvdjii6DUQt2g'
'zCkAz2o03k54++aCL/kUe+hQN3302KUZcFX8c2aF2lVQgL2TBu09408fRX/gY//LPPoXLgAI'
'pL5OTcIjIUt6BYQEA9aS/MIbk4Y3oqJkmAhJ7AdA+LloSA3qdcTHVbZSVxx6UURVdL+7ua7I'
'HGxPz1N29DQm5HFHEZSEljuGkAzZ+d5HzL+NgTj2P/bXcgCVuENTAVc/N0ERZ3k4hL+hGe'
'feQRPPHJ30bfwQMoTC8gaDSJaAl+u4Vw5hS5QvHGZhQPvxulrVuR3TSKbLmIbDGPDAHJUI'
'yzfLiaGT0ZvtinFSWnZJGFmCVxU1RSYqNHNUgS8pY6G4qjXC+hpW8vN9BcmEfz7HnUJ09h6'
'cWnTIdt3NZxtGln2st1tLaOoH7iZTNsn3j+WWzet5/UpS0hHC51EY6ikJIc4OzRo/jmDdcDW'
'8ZRlvI3G0SmBO/kKzbRtt/8HYzdfhjl8TEUaLBy+QK8XJ6IkPJEJvYFuf6rLN6yxP3bF/9FVN'
'HUkDahVsVgERH06Fc7hDp6FEuVI4q1CBHRZoRUqcbsBUwdewlnHv4Ozr3wY2B4K9qUunB+Ccn'
'EGM4fO4J9n/wEPvqlv0Kxr2xwS02VDGFxty0LWW/gH//i8zjy5a9g9F0H4J2aRXagD94bL2P'
'gF2/FdZ96AJsOXY+gr4iWWBNSwORgaPNn0jwCPwV8bSFFTP0um9h1ZR4irYp++Lj9Wos0Z8/'
'jlSe+h6N//lkkpVE0Kjk0l+po7tiGaSJ936OP4roP30N1Ipwdq20Ia+OOyd3Xf/IMvn34ZpR'
'370d2oYY8N/XM1AmM3H433vfQZ9A3MU7Pjtok4yEgmDLCziCy6srPem0rby9T4qRdqnW6SjS'
'6RXJcHCvSIaERO/kvT+KpT/0eGpu3o91oo1XKYe7MSQzdchPu//o3UN60CVnBw8f4LP5EyzW'
'89Njjphs5ikeWRPDZVkIfbvj9P0Dfrp2o1SgyNAQJrV9Cj8jn9uDTG5LeasvQk2GbPTkSa9WT'
'RcAxAdvWe1b37dgCN5fNW7C1wPF+Nk8YfLTqNdQpmdtvvxPv+tPPwj83hWy1j/ksqvt/Aad/'
'8COcePbZVNU6BAu0X3o0MLOnT+O1L34RA9vGgLMzFOV+RJOvYNdDf4zKnmvRpEUsFqgPpKwM'
'SkxDIXeuVadehU3zcvw2jY30je9lLELqnSRfap0ktBHGto5odLglQybeaf+XcZMPrZ3EV5kup'
'lW05Yl4lDgvTycnyCMfpMRLWnRhucbE7bfh1D/cjOnnfojc5jE6KEvgbo3nHvt77D98C/J9VE'
'3CRjOq7d/HGz990RzFUb7IzNW57yU2YHTvPhLEQ5YTty7OYuH0edROT2HxzHksT0/TanNrWp'
'xDe47cX2zS11623FukJCR8QPE357PWQXF1JvRTYSUHUWaN+zslIdNHJ7JShkfxDCpFeCznRk'
'eQHxlFfvMI+rdsQ9+27SiMDFm/fHUE2266CTNEOEvCtJeX0Ld1DCe/+jVMPfAA9hy6zvyHQK'
'Itakz++w8ovtRJcjJDKnr1OnJEeenlE4j4YbA4OYmZ53+Kucef4t57qrt/Sie0RyoJcO23ZA'
'vLFD0CH5M7Cbnho2rt9lqdzNCZL9SxwtSvkO5YXIe45l2gpFxIZ'
'+6VCdNBDtdGk9/3Pgx/4L0YPXgQxf4BxPQM5WuH3MpydNKSoQriM8Dkc89j97sPmsGjfPhYmj'
'mLcw8/TiM1iICdNalPDicDI3j16w8jnJqhDz1tiGWE0vZxUrFIv5nOIMXEo5RojMRX24dhTgd'
'eX1zypyW06cceO61JtLv2jjuQ9dIuZS3K9dCpIOrpO9vCKJE0nK1aDc3jT2ORz0nOWdq6D+0'
'cR1a2GNPkoVFfDa43/uMZ1D92H8qVCrzlZjM5+q//hu/e9SEM7NqNwlwNOQJugAj2ptx0cmKE'
'nfWJSD2FfGhZa+k/ERK3TCyJtRx/NbHRxqm0cXJ9lKcIC71VqbdK5ZauewFdWT6g6CbcqCOq'
'UXxhiW2UzCLbOEEzDtEqF7AwfQFLYQ2/+9JRXLN7F4KQYnTu1RO2TBDkKJ+LhDoVnFgISI8'
'oBUmDulhr2GQihv6TiUysSWytlKKZtkpuV5J7v9KikuvjciG9Ura+a6pqk1GM6SiBHqDsix'
'm3kQF4VAUdPEgyJH0+9+T87u04f/w4zk9NYsvuCaoNnY25o/9psi+vxp1U2MRCih/1ssgxRV'
'WeU8JHhDAR5Hvxtlvu1CWAaut9tG/31n+uMtcXHInEXfPSxoDISuJc0geKT8Low1X6fvZnL5'
'sP4dcWFzFNdpO39JWlv5yB/7splc9utbcgz8rkd02flWV7e7/9ZVu3A7syfiSa/y+Ep0+8h'
'naL3wR1OuW1J/8J5YBWtNmyTdph3MV7DUIOdHsv+VHq9LncmLTz2/TrQOnAI//eq9Vt95k5'
'dgx17kZ+/eK8MdTbOgyPCK/i7tsE19s9rSO6T5H36AbnCv1Y+NHT/MZfhL8wP297aoZfPT6'
'3pKspybhm6GsHg1V+Zc3y0HERwdJSymFRQ6ZW1s1R6J2MvFxWJV9+An1xmrVUpBsLC7YRyD'
'lQuhqQNTy0XxtGxEkWneXaInUYy/R9WdnALnWGvDOzlIVyQ7lFEYUGz+T8Fo9GVPG43zqKv'
'DPRuxRq4SM+K5f/0tYu1KYVU0VOR/d8ifWrIUmP0zPu9CRVx1h+yA96S5JpfYhepcmJN7+3'
'13FWr0Kknbr6Olq1REvmDuOuJnx1iZd+oBIrougnvJW7WpPEWB8yxNl2oozO4YKgkH6B6'
'VqMMSZedj6HDRL/EMK1Xoq9VgEV2e8gm7YqmEIMtjD6WYp4q6W9WxTIo6aSPyXElyJtD1de'
'PWtrv3byV3c77JsYYTz6o1PF/q5+VfP48vWYm4KWeJsB3h2P6Vyv2Vzt8Lj3ltbOhtu9J5Nu'
'r3VmhnBxHEqT27wKNbHsaXijzbrvJCm6skPPtJdA6s9D8JaTrj/8mvpDWiUQ7jeeQOvAdFHl'
'f55SJv8m+5mZfbPMsyhFNavhmKOvr0jlH553lEITf+rVLL4kt4gC/Xqn9iAkXdgOapyKXde'
'wIVmfIzqZJHHqTXaeSYi7vHu2oredfeqTKFjvJ1CE/H+4Lb/rpGaf5V63XA4cdMwlQfQ7qU'
'LZlsIkQhqriI6VEK6OjdNg8bYiy7uh4vgOzLOR0Qr8Hu6cMffI9XqUXk0OrdKTCJCS/XbKPW'
'+vqNg7f+9J7aWD2ZP/hapbTwRSBEEcMHwjatshXmnnNt5pMToox7vd/p3j4AE974dChDwICH'
'gIL0vtPqJF3bVxVu72sBcAfYAouXEa48rpmyv/dWM1pYU89Qx163SbiK04re8iJ2G6IIh5R'
'h2dImaVKoZ4NROQ2zzSDVBlCEFmUxWtmQWGD+iMORUNi9USsppZTb2Prdbb4Lb3NE/dOQL'
'AwQY0+7u2NBeQnXcu5wLuncRT45TU1ru4DuN7m9z8UhuVI74nrS0sohW3UPrQHRgYHGa8V6'
'B7Yg+VkWEUb72LFwoXKAZB96xZYuGe9HaPQAgQW9Atqjxti7WQTheUs9mdReuGSO9c3eVS'
'IYmry1172ldzpHOZlLCsddJHc6fv3Fpaz82j4xydnbfZh2E4GDnIOJUKr1HpSfoBbxWK1'
'KKNxwy6oQy4zzUtkGUnbjzRMwveQRshygmSoZAR3/Y7g7bDZGefqZfHOsQdHnaznFr+ro1'
'uvkaWLowchxjewymNnfYBu+PtdFu2stwK7qVijgKsgzry+eKGL12L86igIhnXHGO1lp3uy'
'Y3HLFhkiymfpm68j+XdD/M1eCSvXAV5j39LxnX02394trJ0l5SPr0JeXqjWK7WuTMofeTDG'
'LyG16q6jKf08nM4QIk36iOMxqnc/xsI52bQYrBKm8FmERUw1UHd/W3wEFpxSAtJPJVbvdN'
'9d62dfpfMs712SgnLPKOHWwar3JIfrYTRg82Y7R40SZV2sywh/7BIV6g826F6qbDeeR4E5e'
'rVjB00wcs5KHNOK2Yoq29zMSL3I50t0R0bPJVeSqC6bv/rbIIm65lcNH6RXrYpliz9kAJjT'
'cmkf2lG7HlwCEUc4ot40U7CcCditsQvZH+fBmje/bi9Md/HfVHHkXmGsY/zTUQkHLupNr7f'
'3bo1dk/bEvSNiaDGRLGNiP0xN2xe+/F8PAoSgqmoxcp48eQKSJM2S4WixjkCf3YnXebwVI8'
'FDWd5l2UpMUU9Yh8Wk7F14mSxOmyj8Tuv3s2msPGCIbV4yPqjcGinC8l4gpQi4YraJycRPH'
'u7Cd3O1jjFmRCGfMi5RUE2FVioxXLPNrYnRiJzY9+CAWF2YQ9pWIPENEOaFy3YFbmYuEsUSe'
'Wxj3git5Qo3Z4Iloztefg3rKdwqUSccShs4cEY1lyMjUFoNR236W8doeWoMlLE/xI4jE2Xnf'
'r6I6NESEKwz8YfgFtyRjLt9ZIS+xrvRjqbKI8cO3YplUmnv0O8iN74Q/v8xbt4YFbsu8amB'
'qxVlTiM6VpMt16xj71VOlQmsxWRIhCarrR5ui9sjur+maDJQZ2dvkzf8CJj73JxjZuxvV/g'
'rKjKPuPai070EhoEi1EsW6f6CKBm/Wd37013Bsjlep//x9ZHdsZ1QNY6Pma3bJbArCMRrH'
'7T81zbIITGpJk9OwtIVbZ'
'/dNp0M3036d0iP91QvNopo9Ukg3P8v2wWAv6CuXaIz4TR9OMoaaXtXYZx7EjusY7FIYRJkI'
'Z/XBQJV1qYuwsZsvyuU+9FWXMRBuw+77fwuvV4ex8O1vGY4Bg1kUkp8wfkLy7dH39hW84hE'
'KKrhCfB2atgDFz074VaFYbpgkMcJI/V1im7Z4kyb9rQPjtuRTh5QoT1sMjVDCLTVcpN/w2qt'
'kvI+df/QQdrz/RnJ2AGXaozIZmMsytkvi3Jm3i7omli6X+PU0PDBkccnJNWMIPn4fzk3swpn'
'vfRf1I0eM4hokAvtFXqLn6aRkeDhGZ0VzWFwIraVO/HVzJ7ETB21AZ9G1meGr+WR9mDS3mC'
'pLpQhbyi2LJDJvDqKFOkMw0vsw9VaYUumee3HNbb/M6NkJDPDIapB/DzHAs7oU2RWZ05Tda'
'Fr3nRuRS8u8YJtnZMDsLAPReJ1aUxzI1CnMv/o65l9/A0tnX0Vy/gKi4ycMOIePANAjgJ'
'cu8pO9VTWe0dxV1/bpnb10TjldirDPO4roHDttciMbEFpxziqu8YxvHMC5aFhcpTMoqEaH'
'BxEf9+KsZLBcqmLsBqEtE4JZN6XGZi2xNjKixcY9sNQgTrDl5o892rTMLQajHHk+5CE8di'
'fDKCVVXgvDwLptJiOcT7N5XTXglLdqmtQ1l+h6EqTYmFjJR0KZc5QlBWhY5F5BjP70bgqnD'
'nPHSVH9zHPrbNEH6LET0HZn37+xUu5xG8DSqp0V7D0ItwVacEikdSjjjJgoqwuGvXNXGOg'
'Z2x0yEXi0PeqFO87AiFGCkemvOKYkSF/6yS1vnLOWTaHN/VcmnSWt0eHThELBHBiCGisGra'
'SI5l6R3qD0q05ZR4TNVHES7bnltEgcg6JF3uVlyFsBpdB+lzgdRTMHeejneZR0H1BrnKECH'
'7GyVy1BAmcsr17WxYs78QNqQF4bppFXqX9BBqIrzmcExwueASjAFzlaWncpqEpJCXVc7wv'
'Nj7eT/BbztCaofk+k0AAAAAyBMj8AAAAAElFTkSuQmCC';
/// A `data:` URI wrapping [base64PngData], suitable for assigning directly to
/// an image element's `src`.
const String base64ImageUrl = 'data:image/png;base64,$base64PngData';
/// Creates a 60x60 [HtmlImage] backed by the base64-encoded test PNG in
/// [base64ImageUrl], used for nine-slice drawing tests.
HtmlImage createNineSliceImage() {
  final DomHTMLImageElement imageElement = createDomHTMLImageElement();
  imageElement.src = base64ImageUrl;
  return HtmlImage(imageElement, 60, 60);
}
/// Creates an [HtmlImage] consisting of three vertical color stripes
/// (red-ish, green-ish, blue-ish), rendered via a 2D canvas and converted to
/// a data URL.
///
/// Note: the stripes are drawn with fixed geometry (33px wide, 50px tall)
/// regardless of [width] and [height], matching the original test fixture.
HtmlImage createTestImage({int width = 100, int height = 50}) {
  final DomCanvasElement canvas =
      createDomCanvasElement(width: width, height: height);
  final DomCanvasRenderingContext2D ctx = canvas.context2D;
  const List<String> stripeColors = <String>['#E04040', '#40E080', '#2040E0'];
  for (int i = 0; i < stripeColors.length; i++) {
    ctx.fillStyle = stripeColors[i];
    ctx.fillRect(i * 33, 0, 33, 50);
    ctx.fill();
  }
  final DomHTMLImageElement imageElement = createDomHTMLImageElement();
  imageElement.src = js_util.callMethod<String>(canvas, 'toDataURL', <dynamic>[]);
  return HtmlImage(imageElement, width, height);
}
/// Builds a single-style Roboto 14pt [Paragraph] containing [text] rendered
/// in [color] (black by default). The caller is responsible for laying out
/// the returned paragraph.
Paragraph createTestParagraph(String text,
    {Color color = const Color(0xFF000000)}) {
  final ParagraphStyle paragraphStyle = ParagraphStyle(
    fontFamily: 'Roboto',
    fontStyle: FontStyle.normal,
    fontWeight: FontWeight.normal,
    fontSize: 14.0,
  );
  final ParagraphBuilder builder = ParagraphBuilder(paragraphStyle)
    ..pushStyle(TextStyle(color: color))
    ..addText(text);
  return builder.build();
}
| engine/lib/web_ui/test/html/drawing/canvas_draw_image_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/drawing/canvas_draw_image_golden_test.dart",
"repo_id": "engine",
"token_count": 17503
} | 310 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide window;
import '../../common/test_initialization.dart';
import 'helper.dart';
/// The canvas region captured by the golden screenshots in this file.
const Rect bounds = Rect.fromLTWH(0, 0, 800, 600);
/// Bootstraps the browser test harness and hands it [testMain] to run.
void main() {
  internalBootstrapBrowserTest(() {
    return testMain;
  });
}
/// Golden tests that exercise paragraph placeholders: every supported
/// [PlaceholderAlignment], interaction with text alignment (both the bitmap
/// and DOM canvases), and placeholders at the start/end of a paragraph.
Future<void> testMain() async {
  setUpUnitTests(
    withImplicitView: true,
    emulateTesterEnvironment: false,
    setUpTestViewDimensions: false,
  );
  test('draws paragraphs with placeholders', () {
    final BitmapCanvas canvas = BitmapCanvas(bounds, RenderStrategy());
    Offset offset = Offset.zero;
    // One paragraph per placeholder alignment, stacked vertically with a
    // 30px gap between them.
    for (final PlaceholderAlignment placeholderAlignment in PlaceholderAlignment.values) {
      final CanvasParagraph paragraph = rich(
        EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 14.0),
        (CanvasParagraphBuilder builder) {
          builder.pushStyle(TextStyle(color: black));
          builder.addText('Lorem ipsum');
          builder.addPlaceholder(
            80.0,
            50.0,
            placeholderAlignment,
            baselineOffset: 40.0,
            baseline: TextBaseline.alphabetic,
          );
          builder.pushStyle(TextStyle(color: blue));
          builder.addText('dolor sit amet, consecteur.');
        },
      )..layout(constrain(200.0));
      // Draw the paragraph.
      canvas.drawParagraph(paragraph, offset);
      // Then fill the placeholders.
      fillPlaceholder(canvas, offset, paragraph);
      offset = offset.translate(0.0, paragraph.height + 30.0);
    }
    return takeScreenshot(canvas, bounds, 'canvas_paragraph_placeholders');
  });
  test('draws paragraphs with placeholders and text align', () {
    final BitmapCanvas canvas = BitmapCanvas(bounds, RenderStrategy());
    const List<TextAlign> aligns = <TextAlign>[
      TextAlign.left,
      TextAlign.center,
      TextAlign.right,
    ];
    Offset offset = Offset.zero;
    // Same paragraph rendered with left/center/right alignment.
    for (final TextAlign align in aligns) {
      final CanvasParagraph paragraph = rich(
        EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 14.0, textAlign: align),
        (CanvasParagraphBuilder builder) {
          builder.pushStyle(TextStyle(color: black));
          builder.addText('Lorem');
          builder.addPlaceholder(80.0, 50.0, PlaceholderAlignment.bottom);
          builder.pushStyle(TextStyle(color: blue));
          builder.addText('ipsum.');
        },
      )..layout(constrain(200.0));
      // Draw the paragraph.
      canvas.drawParagraph(paragraph, offset);
      // Then fill the placeholders.
      fillPlaceholder(canvas, offset, paragraph);
      offset = offset.translate(0.0, paragraph.height + 30.0);
    }
    return takeScreenshot(canvas, bounds, 'canvas_paragraph_placeholders_align');
  });
  test('draws paragraphs with placeholders and text align in DOM mode', () {
    // Same as the previous test, but rendered through [DomCanvas] instead of
    // [BitmapCanvas].
    final DomCanvas canvas = DomCanvas(domDocument.createElement('flt-picture'));
    const List<TextAlign> aligns = <TextAlign>[
      TextAlign.left,
      TextAlign.center,
      TextAlign.right,
    ];
    Offset offset = Offset.zero;
    for (final TextAlign align in aligns) {
      final CanvasParagraph paragraph = rich(
        EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 14.0, textAlign: align),
        (CanvasParagraphBuilder builder) {
          builder.pushStyle(TextStyle(color: black));
          builder.addText('Lorem');
          builder.addPlaceholder(80.0, 50.0, PlaceholderAlignment.bottom);
          builder.pushStyle(TextStyle(color: blue));
          builder.addText('ipsum.');
        },
      )..layout(constrain(200.0));
      // Draw the paragraph.
      canvas.drawParagraph(paragraph, offset);
      // Then fill the placeholders.
      fillPlaceholder(canvas, offset, paragraph);
      offset = offset.translate(0.0, paragraph.height + 30.0);
    }
    return takeScreenshot(canvas, bounds, 'canvas_paragraph_placeholders_align_dom');
  });
  test('draws paragraphs starting or ending with a placeholder', () {
    const Rect bounds = Rect.fromLTWH(0, 0, 420, 300);
    final BitmapCanvas canvas = BitmapCanvas(bounds, RenderStrategy());
    Offset offset = const Offset(10, 10);
    // First paragraph with a placeholder at the beginning.
    final CanvasParagraph paragraph1 = rich(
      EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 24.0, textAlign: TextAlign.center),
      (CanvasParagraphBuilder builder) {
        builder.addPlaceholder(80.0, 50.0, PlaceholderAlignment.baseline, baseline: TextBaseline.alphabetic);
        builder.pushStyle(TextStyle(color: black));
        builder.addText(' Lorem ipsum.');
      },
    )..layout(constrain(400.0));
    // Draw the paragraph.
    canvas.drawParagraph(paragraph1, offset);
    fillPlaceholder(canvas, offset, paragraph1);
    surroundParagraph(canvas, offset, paragraph1);
    offset = offset.translate(0.0, paragraph1.height + 30.0);
    // Second paragraph with a placeholder at the end.
    final CanvasParagraph paragraph2 = rich(
      EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 24.0, textAlign: TextAlign.center),
      (CanvasParagraphBuilder builder) {
        builder.pushStyle(TextStyle(color: black));
        builder.addText('Lorem ipsum ');
        builder.addPlaceholder(80.0, 50.0, PlaceholderAlignment.baseline, baseline: TextBaseline.alphabetic);
      },
    )..layout(constrain(400.0));
    // Draw the paragraph.
    canvas.drawParagraph(paragraph2, offset);
    fillPlaceholder(canvas, offset, paragraph2);
    surroundParagraph(canvas, offset, paragraph2);
    offset = offset.translate(0.0, paragraph2.height + 30.0);
    // Third paragraph with a placeholder alone in the second line.
    // (Same content as the second paragraph but laid out narrower, so the
    // placeholder wraps to its own line.)
    final CanvasParagraph paragraph3 = rich(
      EngineParagraphStyle(fontFamily: 'Roboto', fontSize: 24.0, textAlign: TextAlign.center),
      (CanvasParagraphBuilder builder) {
        builder.pushStyle(TextStyle(color: black));
        builder.addText('Lorem ipsum ');
        builder.addPlaceholder(80.0, 50.0, PlaceholderAlignment.baseline, baseline: TextBaseline.alphabetic);
      },
    )..layout(constrain(200.0));
    // Draw the paragraph.
    canvas.drawParagraph(paragraph3, offset);
    fillPlaceholder(canvas, offset, paragraph3);
    surroundParagraph(canvas, offset, paragraph3);
    return takeScreenshot(canvas, bounds, 'canvas_paragraph_placeholders_start_and_end');
  });
}
/// Strokes a blue rectangle around [paragraph]'s laid-out bounds, positioned
/// at [offset] on [canvas].
void surroundParagraph(
  EngineCanvas canvas,
  Offset offset,
  CanvasParagraph paragraph,
) {
  final Size paragraphSize = Size(paragraph.width, paragraph.height);
  final SurfacePaint borderPaint = SurfacePaint();
  borderPaint.color = blue;
  borderPaint.style = PaintingStyle.stroke;
  canvas.drawRect(offset & paragraphSize, borderPaint.paintData);
}
| engine/lib/web_ui/test/html/paragraph/placeholders_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/paragraph/placeholders_golden_test.dart",
"repo_id": "engine",
"token_count": 2565
} | 311 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:js_util' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide TextStyle;
import '../../common/test_initialization.dart';
import '../screenshot.dart';
// TODO(yjbanov): unskip Firefox tests when Firefox implements WebGL in headless mode.
// https://github.com/flutter/flutter/issues/86623
/// Bootstraps the browser test harness and hands it [testMain] to run.
void main() {
  internalBootstrapBrowserTest(() {
    return testMain;
  });
}
/// Golden tests for [ImageShader] covering every combination of horizontal
/// and vertical [TileMode]s, each drawn over a sampling of shapes.
///
/// Clamp-mode tests are skipped on Firefox (no headless WebGL support; see
/// the TODO above `main`).
Future<void> testMain() async {
  const double screenWidth = 400.0;
  const double screenHeight = 400.0;
  const Rect screenRect = Rect.fromLTWH(0, 0, screenWidth, screenHeight);
  final HtmlImage testImage = createTestImage();
  setUpUnitTests(
    setUpTestViewDimensions: false,
  );
  // Draws a rect, circle, oval, triangle path, rrect, and drrect with
  // [paint], translating [shaderRect] between draws to lay the shapes out in
  // a 3x2 grid.
  void drawShapes(RecordingCanvas rc, SurfacePaint paint, Rect shaderRect) {
    /// Rect.
    rc.drawRect(shaderRect, paint);
    shaderRect = shaderRect.translate(100, 0);
    /// Circle.
    rc.drawCircle(shaderRect.center, shaderRect.width / 2, paint);
    shaderRect = shaderRect.translate(110, 0);
    /// Oval.
    rc.drawOval(Rect.fromLTWH(shaderRect.left, shaderRect.top, shaderRect.width, shaderRect.height / 2), paint);
    shaderRect = shaderRect.translate(-210, 120);
    /// Path.
    final Path path = Path()
      ..moveTo(shaderRect.center.dx, shaderRect.top)
      ..lineTo(shaderRect.right, shaderRect.bottom)
      ..lineTo(shaderRect.left, shaderRect.bottom)
      ..close();
    rc.drawPath(path, paint);
    shaderRect = shaderRect.translate(100, 0);
    /// RRect.
    rc.drawRRect(RRect.fromRectXY(shaderRect, 10, 20), paint);
    shaderRect = shaderRect.translate(110, 0);
    /// DRRect.
    rc.drawDRRect(RRect.fromRectXY(shaderRect, 20, 30),
        RRect.fromRectXY(shaderRect.deflate(24), 16, 24),
        paint);
    shaderRect = shaderRect.translate(-200, 120);
  }
  // Renders all shapes with an [ImageShader] configured with the given tile
  // modes, then compares the output against the golden file [fileName].
  Future<void> testImageShader(
      TileMode tmx, TileMode tmy, String fileName) async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, screenWidth, screenHeight));
    //Rect shaderRect = const Rect.fromLTRB(20, 20, 100, 100);
    const Rect shaderRect = Rect.fromLTRB(0, 0, 100, 100);
    final SurfacePaint paint = Paint() as SurfacePaint;
    paint.shader =
        ImageShader(testImage, tmx, tmy, Matrix4.identity().toFloat64()
            , filterQuality: FilterQuality.high);
    drawShapes(rc, paint, shaderRect);
    // An image shader requires the "arbitrary paint" rendering strategy.
    expect(rc.renderStrategy.hasArbitraryPaint, isTrue);
    await canvasScreenshot(rc, fileName,
        region: screenRect);
  }
  test('Should draw with tiled imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.repeated, 'image_shader_tiled');
  });
  test('Should draw with horizontally mirrored imageshader.', () async {
    await testImageShader(
        TileMode.mirror, TileMode.repeated, 'image_shader_horiz_mirror');
  });
  test('Should draw with vertically mirrored imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.mirror, 'image_shader_vert_mirror');
  });
  test('Should draw with mirrored imageshader.', () async {
    await testImageShader(
        TileMode.mirror, TileMode.mirror, 'image_shader_mirror');
  });
  test('Should draw with horizontal clamp imageshader.', () async {
    await testImageShader(
        TileMode.clamp, TileMode.repeated, 'image_shader_clamp_horiz');
  }, skip: isFirefox);
  test('Should draw with vertical clamp imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.clamp, 'image_shader_clamp_vertical');
  }, skip: isFirefox);
  test('Should draw with clamp imageshader.', () async {
    await testImageShader(
        TileMode.clamp, TileMode.clamp, 'image_shader_clamp');
  }, skip: isFirefox);
}
/// Creates a 16x16 [HtmlImage] with three colored 8x8 quadrants (top-left
/// red-ish, top-right green-ish, bottom-right blue-ish; bottom-left is left
/// transparent), converted to a data URL.
HtmlImage createTestImage() {
  const int width = 16;
  const int width2 = width ~/ 2;
  const int height = 16;
  final DomCanvasElement canvas =
      createDomCanvasElement(width: width, height: height);
  final DomCanvasRenderingContext2D ctx = canvas.context2D;
  // Fills one 8x8 cell of the canvas with the given CSS color.
  void fillCell(String cssColor, int x, int y) {
    ctx.fillStyle = cssColor;
    ctx.fillRect(x, y, width2, width2);
    ctx.fill();
  }
  fillCell('#E04040', 0, 0);
  fillCell('#40E080', width2, 0);
  fillCell('#2040E0', width2, width2);
  final DomHTMLImageElement imageElement = createDomHTMLImageElement();
  imageElement.src = js_util.callMethod<String>(canvas, 'toDataURL', <dynamic>[]);
  return HtmlImage(imageElement, width, height);
}
| engine/lib/web_ui/test/html/shaders/image_shader_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/shaders/image_shader_golden_test.dart",
"repo_id": "engine",
"token_count": 1717
} | 312 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine/browser_detection.dart';
import 'package:ui/ui.dart';
import 'package:web_engine_tester/golden_tester.dart';
import '../common/test_initialization.dart';
import 'utils.dart';
/// Bootstraps the browser test harness and hands it [testMain] to run.
void main() {
  internalBootstrapBrowserTest(() {
    return testMain;
  });
}
/// Golden tests for the four [Gradient] kinds (linear, radial, conical,
/// sweep) applied as a paint shader to a filled rectangle.
///
/// The whole group is skipped on Firefox in HTML mode (no headless WebGL;
/// see the linked issue on the group's `skip`).
Future<void> testMain() async {
  setUpUnitTests(
    withImplicitView: true,
    setUpTestViewDimensions: false,
  );
  // Region captured by every golden screenshot in this group.
  const Rect region = Rect.fromLTWH(0, 0, 300, 300);
  group('Gradients', () {
    test('Using a linear gradient on a paint', () async {
      final PictureRecorder recorder = PictureRecorder();
      final Canvas canvas = Canvas(recorder, region);
      canvas.drawRect(
        const Rect.fromLTRB(50, 50, 250, 250),
        Paint()
          ..shader = Gradient.linear(
            const Offset(50, 50),
            const Offset(250, 250),
            <Color>[
              const Color(0xFFFF0000),
              const Color(0xFF00FF00),
              const Color(0xFF0000FF),
            ],
            <double>[0.0, 0.5, 1.0],
          )
      );
      await drawPictureUsingCurrentRenderer(recorder.endRecording());
      await matchGoldenFile('linear_gradient_paint.png', region: region);
    });
    test('Using a radial gradient on a paint', () async {
      final PictureRecorder recorder = PictureRecorder();
      final Canvas canvas = Canvas(recorder, region);
      canvas.drawRect(
        const Rect.fromLTRB(50, 50, 250, 250),
        Paint()
          ..shader = Gradient.radial(
            const Offset(150, 150),
            100,
            <Color>[
              const Color(0xFFFF0000),
              const Color(0xFF00FF00),
              const Color(0xFF0000FF),
            ],
            <double>[0.0, 0.5, 1.0],
          )
      );
      await drawPictureUsingCurrentRenderer(recorder.endRecording());
      await matchGoldenFile('radial_gradient_paint.png', region: region);
    });
    test('Using a conical gradient on a paint', () async {
      final PictureRecorder recorder = PictureRecorder();
      final Canvas canvas = Canvas(recorder, region);
      canvas.drawRect(
        const Rect.fromLTRB(50, 50, 250, 250),
        Paint()
          // A two-point conical gradient: focal point at (50, 50) with
          // radius 5, outer circle centered at (200, 200) with radius 100.
          ..shader = Gradient.radial(
            const Offset(200, 200),
            100,
            <Color>[
              const Color(0xFFFF0000),
              const Color(0xFF00FF00),
              const Color(0xFF0000FF),
            ],
            <double>[0.0, 0.5, 1.0],
            TileMode.clamp,
            null,
            const Offset(50, 50),
            5,
          )
      );
      await drawPictureUsingCurrentRenderer(recorder.endRecording());
      await matchGoldenFile('conical_gradient_paint.png', region: region);
    });
    test('Using a sweep gradient on a paint', () async {
      final PictureRecorder recorder = PictureRecorder();
      final Canvas canvas = Canvas(recorder, region);
      canvas.drawRect(
        const Rect.fromLTRB(50, 50, 250, 250),
        Paint()
          // Sweep covering the arc from 60 to 240 degrees.
          ..shader = Gradient.sweep(
            const Offset(150, 150),
            <Color>[
              const Color(0xFFFF0000),
              const Color(0xFF00FF00),
              const Color(0xFF0000FF),
            ],
            <double>[0.0, 0.5, 1.0],
            TileMode.clamp,
            math.pi / 3.0,
            4.0 * math.pi / 3.0,
          )
      );
      await drawPictureUsingCurrentRenderer(recorder.endRecording());
      await matchGoldenFile('sweep_gradient_paint.png', region: region);
    });
  }, skip: isFirefox && isHtml); // https://github.com/flutter/flutter/issues/86623
}
| engine/lib/web_ui/test/ui/gradient_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/gradient_golden_test.dart",
"repo_id": "engine",
"token_count": 1791
} | 313 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'package:web_engine_tester/golden_tester.dart';
import '../common/test_initialization.dart';
import 'utils.dart';
/// Bootstraps the browser test harness and hands it [testMain] to run.
void main() {
  internalBootstrapBrowserTest(() {
    return testMain;
  });
}
/// Golden tests for paragraph and text styles.
///
/// Most tests delegate to `testTextStyle` (defined below), which renders a
/// two-part paragraph — outer text with only the paragraph style applied and
/// inner text with the text style under test — and compares against a golden
/// named after the test. `testSampleText` renders longer sample paragraphs in
/// various scripts to exercise font fallback.
Future<void> testMain() async {
  setUpUnitTests(
    withImplicitView: true,
    emulateTesterEnvironment: false,
    setUpTestViewDimensions: false,
  );
  test('text styles - default', () async {
    await testTextStyle('default');
  });
  // Paragraph-style properties: alignment, direction, line limits, font.
  test('text styles - center aligned', () async {
    await testTextStyle('center aligned',
        paragraphTextAlign: ui.TextAlign.center);
  });
  test('text styles - right aligned', () async {
    await testTextStyle('right aligned',
        paragraphTextAlign: ui.TextAlign.right);
  });
  test('text styles - rtl', () async {
    await testTextStyle('rtl', paragraphTextDirection: ui.TextDirection.rtl);
  });
  test('text styles - multiline', () async {
    await testTextStyle('multiline', layoutWidth: 50);
  });
  test('text styles - max lines', () async {
    await testTextStyle('max lines', paragraphMaxLines: 1, layoutWidth: 50);
  });
  test('text styles - ellipsis', () async {
    await testTextStyle('ellipsis',
        paragraphMaxLines: 1, paragraphEllipsis: '...', layoutWidth: 60);
  });
  test('text styles - paragraph font family', () async {
    await testTextStyle('paragraph font family', paragraphFontFamily: 'Ahem');
  });
  test('text styles - paragraph font size', () async {
    await testTextStyle('paragraph font size', paragraphFontSize: 22);
  });
  test('text styles - paragraph height', () async {
    await testTextStyle('paragraph height',
        layoutWidth: 50, paragraphHeight: 1.5);
  });
  test('text styles - text style height overriding paragraph height', () async {
    await testTextStyle('text style height and paragraph style height',
        layoutWidth: 50, paragraphHeight: 1.5, height: 2.0);
  });
  test('text styles - paragraph text height behavior', () async {
    await testTextStyle('paragraph text height behavior',
        layoutWidth: 50,
        paragraphHeight: 1.5,
        paragraphTextHeightBehavior: const ui.TextHeightBehavior(
          applyHeightToFirstAscent: false,
          applyHeightToLastDescent: false,
        ));
  });
  test('text styles - paragraph weight', () async {
    await testTextStyle('paragraph weight',
        paragraphFontWeight: ui.FontWeight.w900);
  });
  test('text style - paragraph font style', () async {
    await testTextStyle(
      'paragraph font style',
      paragraphFontStyle: ui.FontStyle.italic,
    );
  });
  // TODO(yjbanov): locales specified in paragraph styles don't work:
  // https://github.com/flutter/flutter/issues/74687
  // TODO(yjbanov): spaces are not rendered correctly:
  // https://github.com/flutter/flutter/issues/74742
  test('text styles - paragraph locale zh_CN', () async {
    await testTextStyle('paragraph locale zh_CN',
        outerText: '次 化 刃 直 入 令',
        innerText: '',
        paragraphLocale: const ui.Locale('zh', 'CN'));
  });
  test('text styles - paragraph locale zh_TW', () async {
    await testTextStyle('paragraph locale zh_TW',
        outerText: '次 化 刃 直 入 令',
        innerText: '',
        paragraphLocale: const ui.Locale('zh', 'TW'));
  });
  test('text styles - paragraph locale ja', () async {
    await testTextStyle('paragraph locale ja',
        outerText: '次 化 刃 直 入 令',
        innerText: '',
        paragraphLocale: const ui.Locale('ja'));
  });
  test('text styles - paragraph locale ko', () async {
    await testTextStyle('paragraph locale ko',
        outerText: '次 化 刃 直 入 令',
        innerText: '',
        paragraphLocale: const ui.Locale('ko'));
  });
  // Text-style properties applied to the inner text only.
  test('text styles - color', () async {
    await testTextStyle('color', color: const ui.Color(0xFF009900));
  });
  test('text styles - decoration', () async {
    await testTextStyle('decoration',
        decoration: ui.TextDecoration.underline);
  });
  test('text styles - decoration style', () async {
    await testTextStyle('decoration style',
        decoration: ui.TextDecoration.underline,
        decorationStyle: ui.TextDecorationStyle.dashed);
  });
  test('text styles - decoration thickness', () async {
    await testTextStyle('decoration thickness',
        decoration: ui.TextDecoration.underline, decorationThickness: 5.0);
  });
  test('text styles - font weight', () async {
    await testTextStyle('font weight', fontWeight: ui.FontWeight.w900);
  });
  test('text styles - font style', () async {
    await testTextStyle('font style', fontStyle: ui.FontStyle.italic);
  });
  // TODO(yjbanov): not sure how to test this.
  test('text styles - baseline', () async {
    await testTextStyle('baseline',
        textBaseline: ui.TextBaseline.ideographic);
  });
  test('text styles - font family', () async {
    await testTextStyle('font family', fontFamily: 'Ahem');
  });
  test('text styles - non-existent font family', () async {
    await testTextStyle('non-existent font family',
        fontFamily: 'DoesNotExist');
  });
  test('text styles - family fallback', () async {
    await testTextStyle('family fallback',
        fontFamily: 'DoesNotExist', fontFamilyFallback: <String>['Ahem']);
  });
  test('text styles - font size', () async {
    await testTextStyle('font size', fontSize: 24);
  });
  // A regression test for the special case when CanvasKit would default to
  // a positive font size when Flutter specifies zero.
  //
  // See: https://github.com/flutter/flutter/issues/98248
  test('text styles - zero font size', () async {
    // This only sets the inner text style, but not the paragraph style, so
    // "Hello" should be visible, but "World!" should disappear.
    await testTextStyle('zero font size', fontSize: 0);
    // This sets the paragraph font size to zero, but the inner text gets
    // an explicit non-zero size that should override paragraph properties,
    // so this time "Hello" should disappear, but "World!" should still be
    // visible.
    await testTextStyle('zero paragraph font size', paragraphFontSize: 0, fontSize: 14);
  });
  test('text styles - letter spacing', () async {
    await testTextStyle('letter spacing', letterSpacing: 5);
  });
  test('text styles - word spacing', () async {
    await testTextStyle('word spacing',
        innerText: 'Beautiful World!', wordSpacing: 25);
  });
  test('text styles - height', () async {
    await testTextStyle('height', height: 2);
  });
  test('text styles - leading distribution', () async {
    await testTextStyle('half leading',
        height: 20,
        fontSize: 10,
        leadingDistribution: ui.TextLeadingDistribution.even);
    await testTextStyle(
      'half leading inherited from paragraph',
      height: 20,
      fontSize: 10,
      paragraphTextHeightBehavior: const ui.TextHeightBehavior(
        leadingDistribution: ui.TextLeadingDistribution.even,
      ),
    );
    await testTextStyle(
      'text style half leading overrides paragraph style half leading',
      height: 20,
      fontSize: 10,
      leadingDistribution: ui.TextLeadingDistribution.proportional,
      paragraphTextHeightBehavior: const ui.TextHeightBehavior(
        leadingDistribution: ui.TextLeadingDistribution.even,
      ),
    );
  });
  // TODO(yjbanov): locales specified in text styles don't work:
  // https://github.com/flutter/flutter/issues/74687
  // TODO(yjbanov): spaces are not rendered correctly:
  // https://github.com/flutter/flutter/issues/74742
  test('text styles - locale zh_CN', () async {
    await testTextStyle('locale zh_CN',
        innerText: '次 化 刃 直 入 令',
        outerText: '',
        locale: const ui.Locale('zh', 'CN'));
  });
  test('text styles - locale zh_TW', () async {
    await testTextStyle('locale zh_TW',
        innerText: '次 化 刃 直 入 令',
        outerText: '',
        locale: const ui.Locale('zh', 'TW'));
  });
  test('text styles - locale ja', () async {
    await testTextStyle('locale ja',
        innerText: '次 化 刃 直 入 令',
        outerText: '',
        locale: const ui.Locale('ja'));
  });
  test('text styles - locale ko', () async {
    await testTextStyle('locale ko',
        innerText: '次 化 刃 直 入 令',
        outerText: '',
        locale: const ui.Locale('ko'));
  });
  test('text styles - background', () async {
    await testTextStyle('background',
        background: ui.Paint()..color = const ui.Color(0xFF00FF00));
  });
  test('text styles - foreground', () async {
    await testTextStyle('foreground',
        foreground: ui.Paint()..color = const ui.Color(0xFF0000FF));
  });
  test('text styles - foreground and background', () async {
    await testTextStyle(
      'foreground and background',
      foreground: ui.Paint()..color = const ui.Color(0xFFFF5555),
      background: ui.Paint()..color = const ui.Color(0xFF007700),
    );
  });
  test('text styles - background and color', () async {
    await testTextStyle(
      'background and color',
      color: const ui.Color(0xFFFFFF00),
      background: ui.Paint()..color = const ui.Color(0xFF007700),
    );
  });
  test('text styles - shadows', () async {
    await testTextStyle('shadows', shadows: <ui.Shadow>[
      const ui.Shadow(
        color: ui.Color(0xFF999900),
        offset: ui.Offset(10, 10),
        blurRadius: 5,
      ),
      const ui.Shadow(
        color: ui.Color(0xFF009999),
        offset: ui.Offset(-10, -10),
        blurRadius: 10,
      ),
    ]);
  });
  test('text styles - old style figures', () async {
    await testTextStyle(
      'old style figures',
      paragraphFontFamily: 'Roboto',
      paragraphFontSize: 24,
      outerText: '0 1 2 3 4 5 ',
      innerText: '0 1 2 3 4 5',
      fontFeatures: <ui.FontFeature>[const ui.FontFeature.oldstyleFigures()],
    );
  });
  test('text styles - stylistic set 1', () async {
    await testTextStyle(
      'stylistic set 1',
      paragraphFontFamily: 'Roboto',
      paragraphFontSize: 24,
      outerText: 'g',
      innerText: 'g',
      fontFeatures: <ui.FontFeature>[ui.FontFeature.stylisticSet(1)],
    );
  });
  test('text styles - stylistic set 2', () async {
    await testTextStyle(
      'stylistic set 2',
      paragraphFontFamily: 'Roboto',
      paragraphFontSize: 24,
      outerText: 'α',
      innerText: 'α',
      fontFeatures: <ui.FontFeature>[ui.FontFeature.stylisticSet(2)],
    );
  });
  // Text-style properties overriding the corresponding paragraph-style ones.
  test('text styles - override font family', () async {
    await testTextStyle(
      'override font family',
      paragraphFontFamily: 'Ahem',
      fontFamily: 'Roboto',
    );
  });
  test('text styles - override font size', () async {
    await testTextStyle(
      'override font size',
      paragraphFontSize: 36,
      fontSize: 18,
    );
  });
  test('text style - override font weight', () async {
    await testTextStyle(
      'override font weight',
      paragraphFontWeight: ui.FontWeight.w900,
      fontWeight: ui.FontWeight.normal,
    );
  });
  test('text style - override font style', () async {
    await testTextStyle(
      'override font style',
      paragraphFontStyle: ui.FontStyle.italic,
      fontStyle: ui.FontStyle.normal,
    );
  });
  test('text style - characters from multiple fallback fonts', () async {
    await testTextStyle(
      'multi-font characters',
      // This character is claimed by multiple fonts. This test makes sure
      // we can find a font supporting it.
      outerText: '欢',
      innerText: '',
    );
  });
  test('text style - symbols', () async {
    // One of the CJK fonts loaded in one of the tests above also contains
    // some of these symbols. To make sure the test produces predictable
    // results we reset the fallback data forcing the engine to reload
    // fallbacks, which for this test will only load Noto Symbols.
    await testTextStyle(
      'symbols',
      outerText: '← ↑ → ↓ ',
      innerText: '',
    );
  });
  test('strut style - override height', () async {
    await testTextStyle(
      'strut style',
      paragraphStrutStyle: ui.StrutStyle(
        forceStrutHeight: true,
        height: 2,
      ),
    );
  });
  // Sample paragraphs in many scripts, exercising font fallback.
  test('sample Chinese text', () async {
    await testSampleText(
      'chinese',
      '也称乱数假文或者哑元文本, '
      '是印刷及排版领域所常用的虚拟文字。'
      '由于曾经一台匿名的打印机刻意打乱了'
      '一盒印刷字体从而造出一本字体样品书',
    );
  });
  test('sample Armenian text', () async {
    await testSampleText(
      'armenian',
      'տպագրության և տպագրական արդյունաբերության համար նախատեսված մոդելային տեքստ է',
    );
  });
  test('sample Albanian text', () async {
    await testSampleText(
      'albanian',
      'është një tekst shabllon i industrisë së printimit dhe shtypshkronjave Lorem Ipsum ka qenë teksti shabllon',
    );
  });
  test('sample Arabic text', () async {
    await testSampleText(
      'arabic',
      'هناك حقيقة مثبتة منذ زمن طويل وهي أن المحتوى المقروء لصفحة ما سيلهي',
      textDirection: ui.TextDirection.rtl,
    );
  });
  test('sample Bulgarian text', () async {
    await testSampleText(
      'bulgarian',
      'е елементарен примерен текст използван в печатарската и типографската индустрия',
    );
  });
  test('sample Catalan text', () async {
    await testSampleText(
      'catalan',
      'és un text de farciment usat per la indústria de la tipografia i la impremta',
    );
  });
  test('sample English text', () async {
    await testSampleText(
      'english',
      'Lorem Ipsum is simply dummy text of the printing and typesetting industry',
    );
  });
  test('sample Greek text', () async {
    await testSampleText(
      'greek',
      'είναι απλά ένα κείμενο χωρίς νόημα για τους επαγγελματίες της τυπογραφίας και στοιχειοθεσίας',
    );
  });
  test('sample Hebrew text', () async {
    await testSampleText(
      'hebrew',
      'זוהי עובדה מבוססת שדעתו של הקורא תהיה מוסחת על ידי טקטס קריא כאשר הוא יביט בפריסתו',
      textDirection: ui.TextDirection.rtl,
    );
  });
  test('sample Hindi text', () async {
    await testSampleText(
      'hindi',
      'छपाई और अक्षर योजन उद्योग का एक साधारण डमी पाठ है सन १५०० के बाद से अभी तक इस उद्योग का मानक डमी पाठ मन गया जब एक अज्ञात मुद्रक ने नमूना लेकर एक नमूना किताब बनाई',
    );
  });
  test('sample Thai text', () async {
    await testSampleText(
      'thai',
      'คือ เนื้อหาจำลองแบบเรียบๆ ที่ใช้กันในธุรกิจงานพิมพ์หรืองานเรียงพิมพ์ มันได้กลายมาเป็นเนื้อหาจำลองมาตรฐานของธุรกิจดังกล่าวมาตั้งแต่ศตวรรษที่',
    );
  });
  test('sample Georgian text', () async {
    await testSampleText(
      'georgian',
      'საბეჭდი და ტიპოგრაფიული ინდუსტრიის უშინაარსო ტექსტია. იგი სტანდარტად',
    );
  });
  test('sample Bengali text', () async {
    await testSampleText(
      'bengali',
      'ঈদের জামাত মসজিদে, মানতে হবে স্বাস্থ্যবিধি: ধর্ম মন্ত্রণালয়',
    );
  });
  test('hindi svayan test', () async {
    await testSampleText('hindi_svayan', 'स्वयं');
  });
  // We've seen text break when we load many fonts simultaneously. This test
  // combines text in multiple languages into one long paragraph to make sure
  // we can handle it.
  test('sample multilingual text', () async {
    await testSampleText(
      'multilingual',
      '也称乱数假文或者哑元文本, 是印刷及排版领域所常用的虚拟文字。 '
      'տպագրության և տպագրական արդյունաբերության համար '
      'është një tekst shabllon i industrisë së printimit '
      ' زمن طويل وهي أن المحتوى المقروء لصفحة ما سيلهي '
      'е елементарен примерен текст използван в печатарската '
      'és un text de farciment usat per la indústria de la '
      'Lorem Ipsum is simply dummy text of the printing '
      'είναι απλά ένα κείμενο χωρίς νόημα για τους επαγγελματίες '
      ' זוהי עובדה מבוססת שדעתו של הקורא תהיה מוסחת על ידי טקטס קריא '
      'छपाई और अक्षर योजन उद्योग का एक साधारण डमी पाठ है सन '
      'คือ เนื้อหาจำลองแบบเรียบๆ ที่ใช้กันในธุรกิจงานพิมพ์หรืองานเรียงพิมพ์ '
      'საბეჭდი და ტიპოგრაფიული ინდუსტრიის უშინაარსო ტექსტია ',
    );
  });
  test('emoji text with skin tone', () async {
    await testSampleText('emoji_with_skin_tone', '👋🏿 👋🏾 👋🏽 👋🏼 👋🏻');
  }, timeout: const Timeout.factor(2));
  test('font variations are correctly rendered', () async {
    const double testWidth = 300;
    final ui.PictureRecorder recorder = ui.PictureRecorder();
    final ui.Canvas canvas = ui.Canvas(recorder);
    final ui.ParagraphBuilder builder =
        ui.ParagraphBuilder(ui.ParagraphStyle(
      fontSize: 40.0,
      textDirection: ui.TextDirection.ltr,
    ));
    // No variation: the font's default weight.
    builder.pushStyle(ui.TextStyle(
      fontFamily: 'RobotoVariable',
    ));
    builder.addText('Normal\n');
    builder.pop();
    // Helper producing a 'wght' axis variation with the given value.
    ui.FontVariation weight(double w) => ui.FontVariation('wght', w);
    builder.pushStyle(ui.TextStyle(
      fontFamily: 'RobotoVariable',
      fontVariations: <ui.FontVariation>[weight(900)],
    ));
    builder.addText('Heavy\n');
    builder.pop();
    builder.pushStyle(ui.TextStyle(
      fontFamily: 'RobotoVariable',
      fontVariations: <ui.FontVariation>[weight(100)],
    ));
    builder.addText('Light\n');
    builder.pop();
    final ui.Paragraph paragraph = builder.build();
    paragraph.layout(const ui.ParagraphConstraints(width: testWidth - 20));
    canvas.drawParagraph(paragraph, const ui.Offset(10, 10));
    final ui.Picture picture = recorder.endRecording();
    await drawPictureUsingCurrentRenderer(picture);
    await matchGoldenFile(
      'ui_text_font_variation.png',
      region: ui.Rect.fromLTRB(0, 0, testWidth, paragraph.height + 20),
    );
  });
}
/// A convenience function for testing paragraph and text styles.
///
/// Renders a paragraph with two pieces of text, [outerText] and [innerText].
/// [outerText] is added to the root of the paragraph where only paragraph
/// style applies. [innerText] is added under a text style with properties
/// set from the arguments to this method. Parameters with prefix "paragraph"
/// are applied to the paragraph style. Others are applied to the text style.
///
/// [name] is the name of the test used as the description on the golden as
/// well as in the golden file name. Avoid special characters. Spaces are OK;
/// they are replaced by "_" in the file name.
///
/// Use [layoutWidth] to customize the width of the paragraph constraints.
Future<void> testTextStyle(
  // Test properties
  String name, {
  double? layoutWidth,
  // Top-level text where only paragraph style applies
  String outerText = 'Hello ',
  // Second-level text where paragraph and text styles both apply.
  String innerText = 'World!',
  // ParagraphStyle properties
  ui.TextAlign? paragraphTextAlign,
  ui.TextDirection? paragraphTextDirection,
  int? paragraphMaxLines,
  String? paragraphFontFamily,
  double? paragraphFontSize,
  double? paragraphHeight,
  ui.TextHeightBehavior? paragraphTextHeightBehavior,
  ui.FontWeight? paragraphFontWeight,
  ui.FontStyle? paragraphFontStyle,
  ui.StrutStyle? paragraphStrutStyle,
  String? paragraphEllipsis,
  ui.Locale? paragraphLocale,
  // TextStyle properties
  ui.Color? color,
  ui.TextDecoration? decoration,
  ui.Color? decorationColor,
  ui.TextDecorationStyle? decorationStyle,
  double? decorationThickness,
  ui.FontWeight? fontWeight,
  ui.FontStyle? fontStyle,
  ui.TextBaseline? textBaseline,
  String? fontFamily,
  List<String>? fontFamilyFallback,
  double? fontSize,
  double? letterSpacing,
  double? wordSpacing,
  double? height,
  ui.TextLeadingDistribution? leadingDistribution,
  ui.Locale? locale,
  ui.Paint? background,
  ui.Paint? foreground,
  List<ui.Shadow>? shadows,
  List<ui.FontFeature>? fontFeatures,
}) async {
  // Computed inside renderPicture() (it depends on the laid-out paragraph
  // heights) and read afterwards when matching the golden.
  late ui.Rect region;
  ui.Picture renderPicture() {
    const double testWidth = 512;
    final ui.PictureRecorder recorder = ui.PictureRecorder();
    final ui.Canvas canvas = ui.Canvas(recorder);
    canvas.translate(30, 10);
    // Right half of the golden: a plain paragraph showing the test's [name]
    // so the golden image is self-describing.
    final ui.ParagraphBuilder descriptionBuilder =
        ui.ParagraphBuilder(ui.ParagraphStyle());
    descriptionBuilder.addText(name);
    final ui.Paragraph descriptionParagraph = descriptionBuilder.build();
    descriptionParagraph
        .layout(const ui.ParagraphConstraints(width: testWidth / 2 - 70));
    const ui.Offset descriptionOffset = ui.Offset(testWidth / 2 + 30, 0);
    canvas.drawParagraph(descriptionParagraph, descriptionOffset);
    // Left half of the golden: the paragraph under test. The "paragraph*"
    // arguments feed the ParagraphStyle; the rest feed the TextStyle below.
    final ui.ParagraphBuilder pb = ui.ParagraphBuilder(ui.ParagraphStyle(
      textAlign: paragraphTextAlign,
      textDirection: paragraphTextDirection,
      maxLines: paragraphMaxLines,
      fontFamily: paragraphFontFamily,
      fontSize: paragraphFontSize,
      height: paragraphHeight,
      textHeightBehavior: paragraphTextHeightBehavior,
      fontWeight: paragraphFontWeight,
      fontStyle: paragraphFontStyle,
      strutStyle: paragraphStrutStyle,
      ellipsis: paragraphEllipsis,
      locale: paragraphLocale,
    ));
    // [outerText] is affected only by the paragraph style.
    pb.addText(outerText);
    pb.pushStyle(ui.TextStyle(
      color: color,
      decoration: decoration,
      decorationColor: decorationColor,
      decorationStyle: decorationStyle,
      decorationThickness: decorationThickness,
      fontWeight: fontWeight,
      fontStyle: fontStyle,
      textBaseline: textBaseline,
      fontFamily: fontFamily,
      fontFamilyFallback: fontFamilyFallback,
      fontSize: fontSize,
      letterSpacing: letterSpacing,
      wordSpacing: wordSpacing,
      height: height,
      leadingDistribution: leadingDistribution,
      locale: locale,
      background: background,
      foreground: foreground,
      shadows: shadows,
      fontFeatures: fontFeatures,
    ));
    // [innerText] is affected by both the paragraph style and the pushed
    // text style.
    pb.addText(innerText);
    pb.pop();
    final ui.Paragraph p = pb.build();
    p.layout(ui.ParagraphConstraints(width: layoutWidth ?? testWidth / 2));
    canvas.drawParagraph(p, ui.Offset.zero);
    // Draw bracket-shaped markers on either side of the paragraph so the
    // golden visualizes the paragraph's measured height.
    canvas.drawPath(
      ui.Path()
        ..moveTo(-10, 0)
        ..lineTo(-20, 0)
        ..lineTo(-20, p.height)
        ..lineTo(-10, p.height),
      ui.Paint()
        ..style = ui.PaintingStyle.stroke
        ..strokeWidth = 1.0,
    );
    canvas.drawPath(
      ui.Path()
        ..moveTo(testWidth / 2 + 10, 0)
        ..lineTo(testWidth / 2 + 20, 0)
        ..lineTo(testWidth / 2 + 20, p.height)
        ..lineTo(testWidth / 2 + 10, p.height),
      ui.Paint()
        ..style = ui.PaintingStyle.stroke
        ..strokeWidth = 1.0,
    );
    const double padding = 20;
    // The captured region must cover whichever column is taller.
    region = ui.Rect.fromLTRB(
      0,
      0,
      testWidth,
      math.max(
        descriptionOffset.dy + descriptionParagraph.height + padding,
        p.height + padding,
      ),
    );
    return recorder.endRecording();
  }
  // Render once to trigger font downloads.
  renderPicture();
  await renderer.fontCollection.fontFallbackManager?.debugWhenIdle();
  // Render again now that fallback fonts (if any) are available; only this
  // picture is compared against the golden.
  final ui.Picture picture = renderPicture();
  await drawPictureUsingCurrentRenderer(picture);
  await matchGoldenFile(
    'ui_text_styles_${name.replaceAll(' ', '_')}.png',
    region: region,
  );
}
/// Renders [text] for the given [language] and compares the result against
/// the golden file `ui_sample_text_<language>.png`.
///
/// The paragraph is rendered once to kick off any lazy fallback-font
/// downloads, then re-rendered after the fallback manager goes idle so the
/// compared picture uses the final glyphs.
Future<void> testSampleText(String language, String text,
    {ui.TextDirection textDirection = ui.TextDirection.ltr}) async {
  const double testWidth = 300;
  // Captured by renderPicture(); used below to size the golden region.
  double paragraphHeight = 0;
  ui.Picture renderPicture() {
    final ui.PictureRecorder pictureRecorder = ui.PictureRecorder();
    final ui.Canvas pictureCanvas = ui.Canvas(pictureRecorder);
    final ui.ParagraphBuilder builder =
        ui.ParagraphBuilder(ui.ParagraphStyle(
      textDirection: textDirection,
    ));
    builder.addText(text);
    final ui.Paragraph paragraph = builder.build();
    paragraph.layout(const ui.ParagraphConstraints(width: testWidth - 20));
    pictureCanvas.drawParagraph(paragraph, const ui.Offset(10, 10));
    paragraphHeight = paragraph.height;
    return pictureRecorder.endRecording();
  }
  // Render once to trigger font downloads; this picture is discarded.
  renderPicture();
  await renderer.fontCollection.fontFallbackManager?.debugWhenIdle();
  final ui.Picture picture = renderPicture();
  await drawPictureUsingCurrentRenderer(picture);
  await matchGoldenFile(
    'ui_sample_text_$language.png',
    region: ui.Rect.fromLTRB(0, 0, testWidth, paragraphHeight + 20),
  );
}
| engine/lib/web_ui/test/ui/text_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/text_golden_test.dart",
"repo_id": "engine",
"token_count": 10834
} | 314 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/dart_service_isolate.h"
#include <algorithm>
#include <cstring>
#include "flutter/fml/logging.h"
#include "flutter/fml/posix_wrappers.h"
#include "flutter/runtime/embedder_resources.h"
#include "third_party/dart/runtime/include/dart_api.h"
#include "third_party/tonic/converter/dart_converter.h"
#include "third_party/tonic/dart_library_natives.h"
#include "third_party/tonic/logging/dart_error.h"
#define RETURN_ERROR_HANDLE(handle) \
if (Dart_IsError(handle)) { \
return handle; \
}
#define SHUTDOWN_ON_ERROR(handle) \
if (Dart_IsError(handle)) { \
*error = fml::strdup(Dart_GetError(handle)); \
Dart_ExitScope(); \
Dart_ShutdownIsolate(); \
return false; \
}
namespace flutter {
namespace {
static Dart_LibraryTagHandler g_embedder_tag_handler;
static tonic::DartLibraryNatives* g_natives;
static std::string g_vm_service_uri;
// Native resolver for the VM service isolate's library: looks up the native
// entry point registered under |name| in the global tonic natives table.
Dart_NativeFunction GetNativeFunction(Dart_Handle name,
                                      int argument_count,
                                      bool* auto_setup_scope) {
  FML_CHECK(g_natives);
  return g_natives->GetNativeFunction(name, argument_count, auto_setup_scope);
}
// Reverse lookup: returns the registered symbol name for a native function
// previously resolved via GetNativeFunction.
const uint8_t* GetSymbol(Dart_NativeFunction native_function) {
  FML_CHECK(g_natives);
  return g_natives->GetSymbol(native_function);
}
} // namespace
std::mutex DartServiceIsolate::callbacks_mutex_;
std::set<std::unique_ptr<DartServiceIsolate::DartVMServiceServerStateCallback>>
DartServiceIsolate::callbacks_;
// Native entry invoked by the VM service isolate whenever the HTTP server's
// state changes. Records the server URI and fans it out to all registered
// server-state callbacks.
void DartServiceIsolate::NotifyServerState(Dart_NativeArguments args) {
  Dart_Handle exception = nullptr;
  std::string uri =
      tonic::DartConverter<std::string>::FromArguments(args, 0, exception);
  if (exception) {
    // Could not read the URI argument; leave state unchanged.
    return;
  }
  g_vm_service_uri = uri;
  // Collect callbacks to fire in a separate collection and invoke them outside
  // the lock.
  std::vector<DartServiceIsolate::DartVMServiceServerStateCallback>
      callbacks_to_fire;
  {
    std::scoped_lock lock(callbacks_mutex_);
    for (auto& callback : callbacks_) {
      // Copy each std::function so invocation below needs no lock.
      callbacks_to_fire.push_back(*callback.get());
    }
  }
  for (const auto& callback_to_fire : callbacks_to_fire) {
    callback_to_fire(uri);
  }
}
// Registers |callback| to be notified of VM service server state changes.
// Returns an opaque handle (the stored callback's address) usable with
// RemoveServerStatusCallback, or 0 if |callback| is empty. If a server URI is
// already known, the callback is invoked immediately with it.
DartServiceIsolate::CallbackHandle DartServiceIsolate::AddServerStatusCallback(
    const DartServiceIsolate::DartVMServiceServerStateCallback& callback) {
  if (!callback) {
    return 0;
  }
  auto callback_pointer =
      std::make_unique<DartServiceIsolate::DartVMServiceServerStateCallback>(
          callback);
  // The heap address of the stored callback doubles as its handle.
  auto handle = reinterpret_cast<CallbackHandle>(callback_pointer.get());
  {
    std::scoped_lock lock(callbacks_mutex_);
    callbacks_.insert(std::move(callback_pointer));
  }
  if (!g_vm_service_uri.empty()) {
    // Replay the last-known URI so late registrants don't miss it.
    callback(g_vm_service_uri);
  }
  return handle;
}
// Unregisters a callback previously added via AddServerStatusCallback.
// Returns true if |callback_handle| identified a registered callback.
bool DartServiceIsolate::RemoveServerStatusCallback(
    CallbackHandle callback_handle) {
  std::scoped_lock lock(callbacks_mutex_);
  for (auto it = callbacks_.begin(); it != callbacks_.end(); ++it) {
    // The handle is the address of the stored callback object.
    if (reinterpret_cast<CallbackHandle>(it->get()) == callback_handle) {
      callbacks_.erase(it);
      return true;
    }
  }
  return false;
}
// Native entry bound to "VMServiceIO_Shutdown". Intentionally does nothing;
// isolate teardown is handled elsewhere.
void DartServiceIsolate::Shutdown(Dart_NativeArguments args) {
  // NO-OP.
}
// Configures the current isolate as the VM service isolate: installs native
// resolvers, sets the HTTP server's ip/port and option fields on the
// dart:vmservice_io library, and makes the isolate runnable.
//
// On failure, writes a strdup'd message into |*error| (ownership transfers to
// the caller) and shuts the isolate down. Returns true on success.
bool DartServiceIsolate::Startup(const std::string& server_ip,
                                 intptr_t server_port,
                                 Dart_LibraryTagHandler embedder_tag_handler,
                                 bool disable_origin_check,
                                 bool disable_service_auth_codes,
                                 bool enable_service_port_fallback,
                                 char** error) {
  Dart_Isolate isolate = Dart_CurrentIsolate();
  FML_CHECK(isolate);
  // Remember the embedder's library tag handler.
  g_embedder_tag_handler = embedder_tag_handler;
  FML_CHECK(g_embedder_tag_handler);
  // Setup native entries.
  if (!g_natives) {
    // Leaked intentionally: lives for the duration of the process.
    g_natives = new tonic::DartLibraryNatives();
    g_natives->Register({
        {"VMServiceIO_NotifyServerState", NotifyServerState, 1, true},
        {"VMServiceIO_Shutdown", Shutdown, 0, true},
    });
  }
  Dart_Handle uri = Dart_NewStringFromCString("dart:vmservice_io");
  Dart_Handle library = Dart_LookupLibrary(uri);
  SHUTDOWN_ON_ERROR(library);
  Dart_Handle result = Dart_SetRootLibrary(library);
  SHUTDOWN_ON_ERROR(result);
  result = Dart_SetNativeResolver(library, GetNativeFunction, GetSymbol);
  SHUTDOWN_ON_ERROR(result);
  library = Dart_RootLibrary();
  SHUTDOWN_ON_ERROR(library);
  // Set the HTTP server's ip.
  result = Dart_SetField(library, Dart_NewStringFromCString("_ip"),
                         Dart_NewStringFromCString(server_ip.c_str()));
  SHUTDOWN_ON_ERROR(result);
  // If we have a port specified, start the server immediately.
  bool auto_start = server_port >= 0;
  if (server_port < 0) {
    // Adjust server_port to port 0 which will result in the first available
    // port when the HTTP server is started.
    server_port = 0;
  }
  // Set the HTTP's servers port.
  result = Dart_SetField(library, Dart_NewStringFromCString("_port"),
                         Dart_NewInteger(server_port));
  SHUTDOWN_ON_ERROR(result);
  result = Dart_SetField(library, Dart_NewStringFromCString("_autoStart"),
                         Dart_NewBoolean(auto_start));
  SHUTDOWN_ON_ERROR(result);
  result =
      Dart_SetField(library, Dart_NewStringFromCString("_originCheckDisabled"),
                    Dart_NewBoolean(disable_origin_check));
  SHUTDOWN_ON_ERROR(result);
  result =
      Dart_SetField(library, Dart_NewStringFromCString("_authCodesDisabled"),
                    Dart_NewBoolean(disable_service_auth_codes));
  SHUTDOWN_ON_ERROR(result);
  result = Dart_SetField(
      library, Dart_NewStringFromCString("_enableServicePortFallback"),
      Dart_NewBoolean(enable_service_port_fallback));
  SHUTDOWN_ON_ERROR(result);
  // Make runnable.
  // NOTE(review): Dart_IsolateMakeRunnable requires no current isolate, so we
  // exit scope and isolate first, then re-enter afterwards.
  Dart_ExitScope();
  Dart_ExitIsolate();
  *error = Dart_IsolateMakeRunnable(isolate);
  if (*error) {
    Dart_EnterIsolate(isolate);
    Dart_ShutdownIsolate();
    return false;
  }
  Dart_EnterIsolate(isolate);
  Dart_EnterScope();
  return true;
}
} // namespace flutter
| engine/runtime/dart_service_isolate.cc/0 | {
"file_path": "engine/runtime/dart_service_isolate.cc",
"repo_id": "engine",
"token_count": 2675
} | 315 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/embedder_resources.h"
#include <cstring>
#include "flutter/fml/logging.h"
namespace flutter {
using runtime::ResourcesEntry;
// Wraps a nullptr-terminated table of embedded resources. The table must
// outlive this object; the final entry's path_ must be nullptr (sentinel).
EmbedderResources::EmbedderResources(ResourcesEntry* resources_table)
    : resources_table_(resources_table) {}
// Sentinel return value for ResourceLookup when |path| is not in the table.
const int EmbedderResources::kNoSuchInstance = -1;
int EmbedderResources::ResourceLookup(const char* path, const char** resource) {
for (int i = 0; resources_table_[i].path_ != nullptr; i++) {
const ResourcesEntry& entry = resources_table_[i];
if (strcmp(path, entry.path_) == 0) {
*resource = entry.resource_;
FML_DCHECK(entry.length_ > 0);
return entry.length_;
}
}
return kNoSuchInstance;
}
// Returns the path of the |idx|-th table entry, or nullptr when |idx| is past
// the end of the table.
const char* EmbedderResources::Path(int idx) {
  FML_DCHECK(idx >= 0);
  if (ResourcesEntry* entry = At(idx)) {
    FML_DCHECK(entry->path_ != nullptr);
    return entry->path_;
  }
  return nullptr;
}
// Returns a pointer to the |idx|-th entry, or nullptr when the nullptr-path
// sentinel is reached first (i.e. |idx| is out of range).
ResourcesEntry* EmbedderResources::At(int idx) {
  FML_DCHECK(idx >= 0);
  int position = 0;
  for (ResourcesEntry* entry = resources_table_; entry->path_ != nullptr;
       ++entry, ++position) {
    if (position == idx) {
      return entry;
    }
  }
  return nullptr;
}
} // namespace flutter
| engine/runtime/embedder_resources.cc/0 | {
"file_path": "engine/runtime/embedder_resources.cc",
"repo_id": "engine",
"token_count": 485
} | 316 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/runtime_controller.h"
#include <utility>
#include "flutter/common/constants.h"
#include "flutter/common/settings.h"
#include "flutter/fml/message_loop.h"
#include "flutter/fml/trace_event.h"
#include "flutter/lib/ui/compositing/scene.h"
#include "flutter/lib/ui/ui_dart_state.h"
#include "flutter/lib/ui/window/platform_configuration.h"
#include "flutter/lib/ui/window/viewport_metrics.h"
#include "flutter/runtime/dart_isolate_group_data.h"
#include "flutter/runtime/isolate_configuration.h"
#include "flutter/runtime/runtime_delegate.h"
#include "third_party/tonic/dart_message_handler.h"
namespace flutter {
// Minimal constructor: a controller with no VM, used where only the task
// runners and the delegate are needed.
RuntimeController::RuntimeController(RuntimeDelegate& p_client,
                                     const TaskRunners& task_runners)
    : client_(p_client), vm_(nullptr), context_(task_runners) {}
// Full constructor: captures the VM, isolate snapshot, lifecycle callbacks,
// initial platform data, and the UI Dart state context used to launch the
// root isolate later via LaunchRootIsolate.
RuntimeController::RuntimeController(
    RuntimeDelegate& p_client,
    DartVM* p_vm,
    fml::RefPtr<const DartSnapshot> p_isolate_snapshot,
    const std::function<void(int64_t)>& p_idle_notification_callback,
    const PlatformData& p_platform_data,
    const fml::closure& p_isolate_create_callback,
    const fml::closure& p_isolate_shutdown_callback,
    std::shared_ptr<const fml::Mapping> p_persistent_isolate_data,
    const UIDartState::Context& p_context)
    : client_(p_client),
      vm_(p_vm),
      isolate_snapshot_(std::move(p_isolate_snapshot)),
      idle_notification_callback_(p_idle_notification_callback),
      platform_data_(p_platform_data),
      isolate_create_callback_(p_isolate_create_callback),
      isolate_shutdown_callback_(p_isolate_shutdown_callback),
      persistent_isolate_data_(std::move(p_persistent_isolate_data)),
      context_(p_context) {}
// Creates a new controller that will spawn its root isolate in the same
// isolate group as this controller's root isolate (recorded via
// spawning_isolate_). The spawned context reuses this controller's task
// runners, unref queue, path tracker, and engine flags, while taking fresh
// per-spawn delegates (snapshot/io/image) and script advisory strings.
std::unique_ptr<RuntimeController> RuntimeController::Spawn(
    RuntimeDelegate& p_client,
    const std::string& advisory_script_uri,
    const std::string& advisory_script_entrypoint,
    const std::function<void(int64_t)>& p_idle_notification_callback,
    const fml::closure& p_isolate_create_callback,
    const fml::closure& p_isolate_shutdown_callback,
    const std::shared_ptr<const fml::Mapping>& p_persistent_isolate_data,
    fml::WeakPtr<IOManager> io_manager,
    fml::WeakPtr<ImageDecoder> image_decoder,
    fml::WeakPtr<ImageGeneratorRegistry> image_generator_registry,
    fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate) const {
  UIDartState::Context spawned_context{context_.task_runners,
                                       std::move(snapshot_delegate),
                                       std::move(io_manager),
                                       context_.unref_queue,
                                       std::move(image_decoder),
                                       std::move(image_generator_registry),
                                       advisory_script_uri,
                                       advisory_script_entrypoint,
                                       context_.volatile_path_tracker,
                                       context_.concurrent_task_runner,
                                       context_.enable_impeller,
                                       context_.runtime_stage_backend};
  auto result =
      std::make_unique<RuntimeController>(p_client,                      //
                                          vm_,                           //
                                          isolate_snapshot_,             //
                                          p_idle_notification_callback,  //
                                          platform_data_,                //
                                          p_isolate_create_callback,     //
                                          p_isolate_shutdown_callback,   //
                                          p_persistent_isolate_data,     //
                                          spawned_context);              //
  // Remember which isolate to spawn from so the new root isolate joins the
  // same isolate group.
  result->spawning_isolate_ = root_isolate_;
  return result;
}
// Shuts down the root isolate (if still alive) on destruction. Must not be
// called with a current isolate entered on this thread.
RuntimeController::~RuntimeController() {
  FML_DCHECK(Dart_CurrentIsolate() == nullptr);
  std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
  if (root_isolate) {
    // Clear the return-code callback first: it captures |this|.
    root_isolate->SetReturnCodeCallback(nullptr);
    auto result = root_isolate->Shutdown();
    if (!result) {
      FML_DLOG(ERROR) << "Could not shutdown the root isolate.";
    }
    root_isolate_ = {};
  }
}
bool RuntimeController::IsRootIsolateRunning() {
std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
if (root_isolate) {
return root_isolate->GetPhase() == DartIsolate::Phase::Running;
}
return false;
}
// Creates a copy of this controller sharing the same client, VM, snapshot,
// callbacks, platform data, and context. Unlike Spawn, no spawning isolate is
// recorded, so the clone's root isolate will not share an isolate group.
std::unique_ptr<RuntimeController> RuntimeController::Clone() const {
  return std::make_unique<RuntimeController>(client_,                      //
                                             vm_,                          //
                                             isolate_snapshot_,            //
                                             idle_notification_callback_,  //
                                             platform_data_,               //
                                             isolate_create_callback_,     //
                                             isolate_shutdown_callback_,   //
                                             persistent_isolate_data_,     //
                                             context_                      //
  );
}
// Pushes all cached platform state (views, locales, accessibility, user
// settings, lifecycle, displays) into the freshly created isolate. Called
// exactly once, right after the root isolate's platform configuration is set
// up. Returns false if any piece of state could not be delivered.
bool RuntimeController::FlushRuntimeStateToIsolate() {
  FML_DCHECK(!has_flushed_runtime_state_)
      << "FlushRuntimeStateToIsolate is called more than once somehow.";
  has_flushed_runtime_state_ = true;
  for (auto const& [view_id, viewport_metrics] :
       platform_data_.viewport_metrics_for_views) {
    if (!AddView(view_id, viewport_metrics)) {
      return false;
    }
  }
  // Short-circuits on the first failure.
  return SetLocales(platform_data_.locale_data) &&
         SetSemanticsEnabled(platform_data_.semantics_enabled) &&
         SetAccessibilityFeatures(
             platform_data_.accessibility_feature_flags_) &&
         SetUserSettingsData(platform_data_.user_settings_data) &&
         SetInitialLifecycleState(platform_data_.lifecycle_state) &&
         SetDisplays(platform_data_.displays);
}
// The following setters all follow the same pattern: cache the value in
// |platform_data_| so it can be replayed into a future isolate, then forward
// it to the live platform configuration if the root isolate exists. They
// return true only when the value was delivered to the isolate.
// Registers a view and its metrics.
bool RuntimeController::AddView(int64_t view_id,
                                const ViewportMetrics& view_metrics) {
  platform_data_.viewport_metrics_for_views[view_id] = view_metrics;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->AddView(view_id, view_metrics);
    return true;
  }
  return false;
}
// Unregisters a view.
bool RuntimeController::RemoveView(int64_t view_id) {
  platform_data_.viewport_metrics_for_views.erase(view_id);
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    return platform_configuration->RemoveView(view_id);
  }
  return false;
}
// Updates the metrics of an existing view.
bool RuntimeController::SetViewportMetrics(int64_t view_id,
                                           const ViewportMetrics& metrics) {
  TRACE_EVENT0("flutter", "SetViewportMetrics");
  platform_data_.viewport_metrics_for_views[view_id] = metrics;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    return platform_configuration->UpdateViewMetrics(view_id, metrics);
  }
  return false;
}
// Updates the list of platform locales.
bool RuntimeController::SetLocales(
    const std::vector<std::string>& locale_data) {
  platform_data_.locale_data = locale_data;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateLocales(locale_data);
    return true;
  }
  return false;
}
// Updates platform-specific user settings (e.g. text scaling).
bool RuntimeController::SetUserSettingsData(const std::string& data) {
  platform_data_.user_settings_data = data;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateUserSettingsData(
        platform_data_.user_settings_data);
    return true;
  }
  return false;
}
// Sets the lifecycle state the framework starts in.
bool RuntimeController::SetInitialLifecycleState(const std::string& data) {
  platform_data_.lifecycle_state = data;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateInitialLifecycleState(
        platform_data_.lifecycle_state);
    return true;
  }
  return false;
}
// Enables or disables semantics (accessibility) updates.
bool RuntimeController::SetSemanticsEnabled(bool enabled) {
  platform_data_.semantics_enabled = enabled;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateSemanticsEnabled(
        platform_data_.semantics_enabled);
    return true;
  }
  return false;
}
// Updates the accessibility feature bit flags.
bool RuntimeController::SetAccessibilityFeatures(int32_t flags) {
  platform_data_.accessibility_feature_flags_ = flags;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateAccessibilityFeatures(
        platform_data_.accessibility_feature_flags_);
    return true;
  }
  return false;
}
// Starts a new frame: resets the set of views rendered this frame and
// forwards the frame begin to the framework. Returns false if no root
// isolate/platform configuration is available.
bool RuntimeController::BeginFrame(fml::TimePoint frame_time,
                                   uint64_t frame_number) {
  MarkAsFrameBorder();
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->BeginFrame(frame_time, frame_number);
    return true;
  }
  return false;
}
// Forwards collected frame timing samples to the framework.
bool RuntimeController::ReportTimings(std::vector<int64_t> timings) {
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->ReportTimings(std::move(timings));
    return true;
  }
  return false;
}
// Tells the Dart VM (and optionally the embedder) that the UI thread is idle
// until |deadline| (expressed on the Dart timeline clock), giving the VM a
// chance to run GC. Skipped when less than 1ms remains, when there is no root
// isolate, or when the framework requested latency performance mode.
bool RuntimeController::NotifyIdle(fml::TimeDelta deadline) {
  if (deadline - fml::TimeDelta::FromMicroseconds(Dart_TimelineGetMicros()) <
      fml::TimeDelta::FromMilliseconds(1)) {
    // There's less than 1ms left before the deadline. Upstream callers do not
    // check to see if the deadline is in the past, and work after this point
    // will be in vain.
    return false;
  }
  std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
  if (!root_isolate) {
    return false;
  }
  tonic::DartState::Scope scope(root_isolate);
  Dart_PerformanceMode performance_mode =
      PlatformConfigurationNativeApi::GetDartPerformanceMode();
  if (performance_mode == Dart_PerformanceMode::Dart_PerformanceMode_Latency) {
    // Latency mode: avoid GC pauses even during idle periods.
    return false;
  }
  Dart_NotifyIdle(deadline.ToMicroseconds());
  // Idle notifications being in isolate scope are part of the contract.
  if (idle_notification_callback_) {
    TRACE_EVENT0("flutter", "EmbedderIdleNotification");
    idle_notification_callback_(deadline.ToMicroseconds());
  }
  return true;
}
// Notifies the Dart VM that the engine is being destroyed, letting it reclaim
// memory aggressively. Returns false when there is no live root isolate.
bool RuntimeController::NotifyDestroyed() {
  if (auto isolate = root_isolate_.lock()) {
    tonic::DartState::Scope scope(isolate);
    Dart_NotifyDestroyed();
    return true;
  }
  return false;
}
// Forwards a platform channel message to the framework. Returns false when
// the root isolate's platform configuration is unavailable (the message is
// dropped in that case).
bool RuntimeController::DispatchPlatformMessage(
    std::unique_ptr<PlatformMessage> message) {
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    TRACE_EVENT0("flutter", "RuntimeController::DispatchPlatformMessage");
    platform_configuration->DispatchPlatformMessage(std::move(message));
    return true;
  }
  return false;
}
// Forwards a batch of pointer events to the framework.
bool RuntimeController::DispatchPointerDataPacket(
    const PointerDataPacket& packet) {
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    TRACE_EVENT0("flutter", "RuntimeController::DispatchPointerDataPacket");
    platform_configuration->DispatchPointerDataPacket(packet);
    return true;
  }
  return false;
}
// Forwards an accessibility action targeting |node_id| to the framework.
bool RuntimeController::DispatchSemanticsAction(int32_t node_id,
                                                SemanticsAction action,
                                                fml::MallocMapping args) {
  TRACE_EVENT1("flutter", "RuntimeController::DispatchSemanticsAction", "mode",
               "basic");
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->DispatchSemanticsAction(node_id, action,
                                                    std::move(args));
    return true;
  }
  return false;
}
// Returns the root isolate's platform configuration, or nullptr when the
// root isolate has not been launched or has already been collected.
PlatformConfiguration*
RuntimeController::GetPlatformConfigurationIfAvailable() {
  if (auto isolate = root_isolate_.lock()) {
    return isolate->platform_configuration();
  }
  return nullptr;
}
// |PlatformConfigurationClient|
// Delegates to the runtime delegate for the initial route name.
std::string RuntimeController::DefaultRouteName() {
  return client_.DefaultRouteName();
}
// |PlatformConfigurationClient|
// Asks the delegate to schedule a new frame.
void RuntimeController::ScheduleFrame() {
  client_.ScheduleFrame();
}
// Reports completion of a warm-up frame as if all views had rendered.
void RuntimeController::EndWarmUpFrame() {
  client_.OnAllViewsRendered();
}
// |PlatformConfigurationClient|
// Hands the layer tree produced by the framework for |view_id| to the
// delegate for rasterization, then records the view as rendered for this
// frame. Silently drops the scene when the view has no registered metrics.
void RuntimeController::Render(int64_t view_id,
                               Scene* scene,
                               double width,
                               double height) {
  const ViewportMetrics* view_metrics =
      UIDartState::Current()->platform_configuration()->GetMetrics(view_id);
  if (view_metrics == nullptr) {
    return;
  }
  client_.Render(view_id, scene->takeLayerTree(width, height),
                 view_metrics->device_pixel_ratio);
  rendered_views_during_frame_.insert(view_id);
  CheckIfAllViewsRendered();
}
// Resets the per-frame bookkeeping of which views have been rendered.
void RuntimeController::MarkAsFrameBorder() {
  rendered_views_during_frame_.clear();
}
// Notifies the delegate once every registered view has rendered during the
// current frame, then resets the per-frame bookkeeping. The non-empty check
// prevents a spurious notification when no views exist at all.
void RuntimeController::CheckIfAllViewsRendered() {
  // Idiomatic emptiness check instead of comparing size() with 0.
  if (!rendered_views_during_frame_.empty() &&
      rendered_views_during_frame_.size() ==
          platform_data_.viewport_metrics_for_views.size()) {
    client_.OnAllViewsRendered();
    MarkAsFrameBorder();
  }
}
// |PlatformConfigurationClient|
// Forwards a semantics tree update to the delegate, but only while semantics
// is enabled.
void RuntimeController::UpdateSemantics(SemanticsUpdate* update) {
  if (platform_data_.semantics_enabled) {
    client_.UpdateSemantics(update->takeNodes(), update->takeActions());
  }
}
// |PlatformConfigurationClient|
// Forwards a framework-originated platform message to the delegate.
void RuntimeController::HandlePlatformMessage(
    std::unique_ptr<PlatformMessage> message) {
  client_.HandlePlatformMessage(std::move(message));
}
// |PlatformConfigurationClient|
FontCollection& RuntimeController::GetFontCollection() {
  return client_.GetFontCollection();
}
// |PlatfromConfigurationClient|
std::shared_ptr<AssetManager> RuntimeController::GetAssetManager() {
  return client_.GetAssetManager();
}
// |PlatformConfigurationClient|
// Publishes the isolate's debug name and main port to the delegate.
void RuntimeController::UpdateIsolateDescription(const std::string isolate_name,
                                                 int64_t isolate_port) {
  client_.UpdateIsolateDescription(isolate_name, isolate_port);
}
// |PlatformConfigurationClient|
// Toggles whether frame timings should be reported back to the framework.
void RuntimeController::SetNeedsReportTimings(bool value) {
  client_.SetNeedsReportTimings(value);
}
// |PlatformConfigurationClient|
// Returns the data preserved across isolate restarts (may be null).
std::shared_ptr<const fml::Mapping>
RuntimeController::GetPersistentIsolateData() {
  return persistent_isolate_data_;
}
// |PlatformConfigurationClient|
// Asks the delegate/platform to pick the best-matching locale.
std::unique_ptr<std::vector<std::string>>
RuntimeController::ComputePlatformResolvedLocale(
    const std::vector<std::string>& supported_locale_data) {
  return client_.ComputePlatformResolvedLocale(supported_locale_data);
}
// |PlatformConfigurationClient|
// Notifies the delegate that a platform channel gained or lost a listener.
void RuntimeController::SendChannelUpdate(std::string name, bool listening) {
  client_.SendChannelUpdate(std::move(name), listening);
}
// Returns the root isolate's main Dart port, or ILLEGAL_PORT when no root
// isolate is alive.
Dart_Port RuntimeController::GetMainPort() {
  std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
  return root_isolate ? root_isolate->main_port() : ILLEGAL_PORT;
}
// Returns the root isolate's debug name, or the empty string when no root
// isolate is alive.
std::string RuntimeController::GetIsolateName() {
  std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
  return root_isolate ? root_isolate->debug_name() : "";
}
// Whether the root isolate still has open receive ports (i.e. pending
// asynchronous work). False when there is no root isolate.
bool RuntimeController::HasLivePorts() {
  if (auto isolate = root_isolate_.lock()) {
    tonic::DartState::Scope scope(isolate);
    return Dart_HasLivePorts();
  }
  return false;
}
// Returns the last Dart error recorded on the root isolate, or kNoError when
// no root isolate is alive.
tonic::DartErrorHandleType RuntimeController::GetLastError() {
  std::shared_ptr<DartIsolate> root_isolate = root_isolate_.lock();
  return root_isolate ? root_isolate->GetLastError() : tonic::kNoError;
}
// Creates and runs the root isolate with the given entrypoint, wires up the
// platform message handler and return-code callback, and flushes the cached
// platform state into the new isolate. Returns false if a root isolate is
// already running or creation fails.
bool RuntimeController::LaunchRootIsolate(
    const Settings& settings,
    const fml::closure& root_isolate_create_callback,
    std::optional<std::string> dart_entrypoint,
    std::optional<std::string> dart_entrypoint_library,
    const std::vector<std::string>& dart_entrypoint_args,
    std::unique_ptr<IsolateConfiguration> isolate_configuration) {
  if (root_isolate_.lock()) {
    FML_LOG(ERROR) << "Root isolate was already running.";
    return false;
  }
  auto strong_root_isolate =
      DartIsolate::CreateRunningRootIsolate(
          settings,                                       //
          isolate_snapshot_,                              //
          std::make_unique<PlatformConfiguration>(this),  //
          DartIsolate::Flags{},                           //
          root_isolate_create_callback,                   //
          isolate_create_callback_,                       //
          isolate_shutdown_callback_,                     //
          std::move(dart_entrypoint),                     //
          std::move(dart_entrypoint_library),             //
          dart_entrypoint_args,                           //
          std::move(isolate_configuration),               //
          context_,                                       //
          spawning_isolate_.lock().get())                 //
          .lock();
  if (!strong_root_isolate) {
    FML_LOG(ERROR) << "Could not create root isolate.";
    return false;
  }
  // Enable platform channels for background isolates.
  strong_root_isolate->GetIsolateGroupData().SetPlatformMessageHandler(
      strong_root_isolate->GetRootIsolateToken(),
      client_.GetPlatformMessageHandler());
  // The root isolate ivar is weak.
  root_isolate_ = strong_root_isolate;
  // Capture by `this` here is safe because the callback is made by the dart
  // state itself. The isolate (and its Dart state) is owned by this object and
  // it will be collected before this object.
  strong_root_isolate->SetReturnCodeCallback(
      [this](uint32_t code) { root_isolate_return_code_ = code; });
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    tonic::DartState::Scope scope(strong_root_isolate);
    platform_configuration->DidCreateIsolate();
    if (!FlushRuntimeStateToIsolate()) {
      FML_DLOG(ERROR) << "Could not set up initial isolate state.";
    }
  } else {
    FML_DCHECK(false) << "RuntimeController created without window binding.";
  }
  FML_DCHECK(Dart_CurrentIsolate() == nullptr);
  client_.OnRootIsolateCreated();
  return true;
}
// Returns the VM service ID of the root isolate, if one is alive.
std::optional<std::string> RuntimeController::GetRootIsolateServiceID() const {
  if (auto isolate = root_isolate_.lock()) {
    return isolate->GetServiceId();
  }
  return std::nullopt;
}
// Returns the exit code reported by the root isolate, if it has exited.
std::optional<uint32_t> RuntimeController::GetRootIsolateReturnCode() {
  return root_isolate_return_code_;
}
uint64_t RuntimeController::GetRootIsolateGroup() const {
auto isolate = root_isolate_.lock();
if (isolate) {
auto isolate_scope = tonic::DartIsolateScope(isolate->isolate());
Dart_IsolateGroup isolate_group = Dart_CurrentIsolateGroup();
return reinterpret_cast<uint64_t>(isolate_group);
} else {
return 0;
}
}
// Supplies a downloaded deferred-library snapshot to the root isolate.
// No-op when the root isolate has already been collected; the original code
// dereferenced the result of lock() unconditionally, which would crash if
// the isolate was gone by the time the download completed.
void RuntimeController::LoadDartDeferredLibrary(
    intptr_t loading_unit_id,
    std::unique_ptr<const fml::Mapping> snapshot_data,
    std::unique_ptr<const fml::Mapping> snapshot_instructions) {
  if (auto isolate = root_isolate_.lock()) {
    isolate->LoadLoadingUnit(loading_unit_id, std::move(snapshot_data),
                             std::move(snapshot_instructions));
  }
}
// Reports a deferred-library load failure to the root isolate. |transient|
// indicates whether the framework may retry. No-op when the root isolate has
// already been collected (the original dereferenced lock() unconditionally).
void RuntimeController::LoadDartDeferredLibraryError(
    intptr_t loading_unit_id,
    const std::string
        error_message,  // NOLINT(performance-unnecessary-value-param)
    bool transient) {
  if (auto isolate = root_isolate_.lock()) {
    isolate->LoadLoadingUnitError(loading_unit_id, error_message, transient);
  }
}
// Asks the delegate to download the deferred library for |loading_unit_id|.
// (Dropped the redundant `return` of a void expression.)
void RuntimeController::RequestDartDeferredLibrary(intptr_t loading_unit_id) {
  client_.RequestDartDeferredLibrary(loading_unit_id);
}
// Caches the display list and forwards it to the framework; returns true only
// when the framework received the update.
bool RuntimeController::SetDisplays(const std::vector<DisplayData>& displays) {
  TRACE_EVENT0("flutter", "SetDisplays");
  platform_data_.displays = displays;
  if (auto* platform_configuration = GetPlatformConfigurationIfAvailable()) {
    platform_configuration->UpdateDisplays(displays);
    return true;
  }
  return false;
}
// Delegates platform-specific (nonlinear) font scaling to the delegate.
double RuntimeController::GetScaledFontSize(double unscaled_font_size,
                                            int configuration_id) const {
  return client_.GetScaledFontSize(unscaled_font_size, configuration_id);
}
// Shuts down all platform isolates managed by this controller.
void RuntimeController::ShutdownPlatformIsolates() {
  platform_isolate_manager_->ShutdownPlatformIsolates();
}
// Simple value type holding the four components of a locale identifier.
RuntimeController::Locale::Locale(std::string language_code_,
                                  std::string country_code_,
                                  std::string script_code_,
                                  std::string variant_code_)
    : language_code(std::move(language_code_)),
      country_code(std::move(country_code_)),
      script_code(std::move(script_code_)),
      variant_code(std::move(variant_code_)) {}
RuntimeController::Locale::~Locale() = default;
} // namespace flutter
| engine/runtime/runtime_controller.cc/0 | {
"file_path": "engine/runtime/runtime_controller.cc",
"repo_id": "engine",
"token_count": 8656
} | 317 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/base64.h"
#include "flutter/fml/logging.h"
#include <cstdint>
#define DecodePad -2
#define EncodePad 64
static const char kDefaultEncode[] =
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/=";
static const signed char kDecodeData[] = {
62, -1, -1, -1, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1,
-1, -1, DecodePad, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
-1, -1, -1, -1, -1, -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51};
namespace flutter {
// Decodes up to |srcLength| bytes of base64 text from |srcv| into |dstv|,
// writing the decoded byte count to |*dstLength|. Decoding stops early at a
// NUL byte; bytes <= ' ' are skipped as whitespace. If |dstv| is null, only
// the output length is computed (sizing pass). Returns kNone on success, or
// kBadChar / kBadPadding on malformed input.
Base64::Error Base64::Decode(const void* srcv,
                             size_t srcLength,
                             void* dstv,
                             size_t* dstLength) {
  const unsigned char* src = static_cast<const unsigned char*>(srcv);
  unsigned char* dst = static_cast<unsigned char*>(dstv);
  // i counts decoded output bytes.
  int i = 0;
  // padTwo: quantum ended with "==" (1 output byte); padThree: quantum ended
  // with at least one '=' (at most 2 output bytes).
  bool padTwo = false;
  bool padThree = false;
  char unsigned const* const end = src + srcLength;
  while (src < end) {
    // One 4-character base64 quantum, decoded to 6-bit values.
    unsigned char bytes[4] = {0, 0, 0, 0};
    int byte = 0;
    do {
      unsigned char srcByte = *src++;
      if (srcByte == 0) {
        // Embedded NUL terminates the input.
        *dstLength = i;
        return Error::kNone;
      }
      if (srcByte <= ' ') {
        continue;  // treat as white space
      }
      if (srcByte < '+' || srcByte > 'z') {
        return Error::kBadChar;
      }
      // kDecodeData maps '+'..'z' to its 6-bit value, DecodePad for '=',
      // or -1 for invalid characters.
      signed char decoded = kDecodeData[srcByte - '+'];
      bytes[byte] = decoded;
      if (decoded != DecodePad) {
        if (decoded < 0) {
          return Error::kBadChar;
        }
        byte++;
        // Peek at the next byte (without consuming) to detect end of input.
        if (*src) {
          continue;
        }
        if (byte == 0) {
          *dstLength = i;
          return Error::kNone;
        }
        if (byte == 4) {
          break;
        }
      }
      // As an optimization, if we find an equals sign
      // we assume all future bytes to read are the
      // appropriate number of padding equals signs.
      if (byte < 2) {
        // '=' cannot appear before at least two data characters.
        return Error::kBadPadding;
      }
      padThree = true;
      if (byte == 2) {
        padTwo = true;
      }
      break;
    } while (byte < 4);
    // Repack four 6-bit values into (up to) three 8-bit output bytes.
    int two = 0;
    int three = 0;
    if (dst) {
      int one = (uint8_t)(bytes[0] << 2);
      two = bytes[1];
      one |= two >> 4;
      two = (uint8_t)((two << 4) & 0xFF);
      three = bytes[2];
      two |= three >> 2;
      three = (uint8_t)((three << 6) & 0xFF);
      three |= bytes[3];
      FML_DCHECK(one < 256 && two < 256 && three < 256);
      dst[i] = (unsigned char)one;
    }
    i++;
    if (padTwo) {
      // "xx==" quantum: only one output byte.
      break;
    }
    if (dst) {
      dst[i] = (unsigned char)two;
    }
    i++;
    if (padThree) {
      // "xxx=" quantum: only two output bytes.
      break;
    }
    if (dst) {
      dst[i] = (unsigned char)three;
    }
    i++;
  }
  *dstLength = i;
  return Error::kNone;
}
// Encodes |length| bytes starting at |srcv| as base64 text written to |dstv|.
//
// Every complete 3-byte input group produces 4 output characters; a final
// partial group (1 or 2 leftover bytes) is completed with '=' padding.
// Returns the number of characters written, i.e. EncodedSize(length).
size_t Base64::Encode(const void* srcv, size_t length, void* dstv) {
  FML_DCHECK(dstv);
  const unsigned char* input = static_cast<const unsigned char*>(srcv);
  unsigned char* output = static_cast<unsigned char*>(dstv);
  const char* table = kDefaultEncode;
  const size_t leftover = length % 3;
  const unsigned char* const bulk_end = input + (length - leftover);
  // Encode all complete 3-byte groups.
  for (; input < bulk_end; input += 3) {
    unsigned byte0 = input[0];
    unsigned byte1 = input[1];
    unsigned byte2 = input[2];
    // NOLINTBEGIN(clang-analyzer-core.NullDereference)
    *output++ = table[byte0 >> 2];
    *output++ = table[((byte0 << 4) | (byte1 >> 4)) & 0x3F];
    *output++ = table[((byte1 << 2) | (byte2 >> 6)) & 0x3F];
    *output++ = table[byte2 & 0x3F];
    // NOLINTEND(clang-analyzer-core.NullDereference)
  }
  if (leftover > 0) {
    // One leftover byte yields two data characters plus "==";
    // two leftover bytes yield three data characters plus "=".
    int carry_bits = 0;
    int third_char = EncodePad;
    int first = (uint8_t)*input++;
    if (leftover == 2) {
      int second = *input++;
      carry_bits = second >> 4;
      third_char = (second << 2) & 0x3F;
    }
    // NOLINTBEGIN(clang-analyzer-core.NullDereference)
    *output++ = table[first >> 2];
    *output++ = table[(carry_bits | first << 4) & 0x3F];
    *output++ = table[third_char];
    *output++ = table[EncodePad];
    // NOLINTEND(clang-analyzer-core.NullDereference)
  }
  return EncodedSize(length);
}
} // namespace flutter
| engine/shell/common/base64.cc/0 | {
"file_path": "engine/shell/common/base64.cc",
"repo_id": "engine",
"token_count": 2122
} | 318 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/engine.h"
// #include <cstring>
#include "flutter/common/constants.h"
#include "flutter/lib/ui/compositing/scene_builder.h"
#include "flutter/shell/common/shell_test.h"
#include "flutter/testing/fixture_test.h"
#include "gmock/gmock.h"
// CREATE_NATIVE_ENTRY is leaky by design
// NOLINTBEGIN(clang-analyzer-core.StackAddressEscape)
namespace flutter {
namespace {
using ::testing::Invoke;
using ::testing::ReturnRef;
fml::AutoResetWaitableEvent native_latch;
// Executes |task| on |task_runner| and blocks the calling thread until the
// task has finished. RunNowOrPostTask executes the task inline when the
// caller is already on |task_runner|, avoiding a deadlock in that case.
void PostSync(const fml::RefPtr<fml::TaskRunner>& task_runner,
              const fml::closure& task) {
  fml::AutoResetWaitableEvent completion;
  fml::TaskRunner::RunNowOrPostTask(task_runner, [&completion, &task] {
    task();
    completion.Signal();
  });
  completion.Wait();
}
// Sort the argument list of `LayerTreeTask` into a new list that is sorted by
// their view IDs. `FrameItem::layer_tree_tasks` might not come sorted.
std::vector<const LayerTreeTask*> Sorted(
const std::vector<std::unique_ptr<LayerTreeTask>>& layer_tree_tasks) {
std::vector<const LayerTreeTask*> result;
result.reserve(layer_tree_tasks.size());
for (auto& task_ptr : layer_tree_tasks) {
result.push_back(task_ptr.get());
}
std::sort(result.begin(), result.end(),
[](const LayerTreeTask* a, const LayerTreeTask* b) {
return a->view_id < b->view_id;
});
return result;
}
// A gmock implementation of Engine::Delegate, used to observe and stub the
// callbacks the Engine makes toward its delegate (semantics updates, platform
// messages, isolate lifecycle, timing queries, etc.).
class MockDelegate : public Engine::Delegate {
 public:
  MOCK_METHOD(void,
              OnEngineUpdateSemantics,
              (SemanticsNodeUpdates, CustomAccessibilityActionUpdates),
              (override));
  MOCK_METHOD(void,
              OnEngineHandlePlatformMessage,
              (std::unique_ptr<PlatformMessage>),
              (override));
  MOCK_METHOD(void, OnPreEngineRestart, (), (override));
  MOCK_METHOD(void, OnRootIsolateCreated, (), (override));
  MOCK_METHOD(void,
              UpdateIsolateDescription,
              (const std::string, int64_t),
              (override));
  MOCK_METHOD(void, SetNeedsReportTimings, (bool), (override));
  MOCK_METHOD(std::unique_ptr<std::vector<std::string>>,
              ComputePlatformResolvedLocale,
              (const std::vector<std::string>&),
              (override));
  MOCK_METHOD(void, RequestDartDeferredLibrary, (intptr_t), (override));
  MOCK_METHOD(fml::TimePoint, GetCurrentTimePoint, (), (override));
  MOCK_METHOD(const std::shared_ptr<PlatformMessageHandler>&,
              GetPlatformMessageHandler,
              (),
              (const, override));
  MOCK_METHOD(void, OnEngineChannelUpdate, (std::string, bool), (override));
  MOCK_METHOD(double,
              GetScaledFontSize,
              (double font_size, int configuration_id),
              (const, override));
};
// A gmock implementation of Animator::Delegate. Tests use OnAnimatorBeginFrame
// to drive Engine::BeginFrame and OnAnimatorDraw to inspect the FramePipeline
// content the animator submits for rasterization.
class MockAnimatorDelegate : public Animator::Delegate {
 public:
  /* Animator::Delegate */
  MOCK_METHOD(void,
              OnAnimatorBeginFrame,
              (fml::TimePoint frame_target_time, uint64_t frame_number),
              (override));
  MOCK_METHOD(void,
              OnAnimatorNotifyIdle,
              (fml::TimeDelta deadline),
              (override));
  MOCK_METHOD(void,
              OnAnimatorUpdateLatestFrameTargetTime,
              (fml::TimePoint frame_target_time),
              (override));
  MOCK_METHOD(void,
              OnAnimatorDraw,
              (std::shared_ptr<FramePipeline> pipeline),
              (override));
  MOCK_METHOD(void,
              OnAnimatorDrawLastLayerTrees,
              (std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder),
              (override));
};
// A gmock implementation of PlatformMessageHandler. The tests below only need
// it as a stub for MockDelegate::GetPlatformMessageHandler, which returns a
// reference to a shared_ptr of this type.
class MockPlatformMessageHandler : public PlatformMessageHandler {
 public:
  MOCK_METHOD(void,
              HandlePlatformMessage,
              (std::unique_ptr<PlatformMessage> message),
              (override));
  MOCK_METHOD(bool,
              DoesHandlePlatformMessageOnPlatformThread,
              (),
              (const, override));
  MOCK_METHOD(void,
              InvokePlatformMessageResponseCallback,
              (int response_id, std::unique_ptr<fml::Mapping> mapping),
              (override));
  MOCK_METHOD(void,
              InvokePlatformMessageEmptyResponseCallback,
              (int response_id),
              (override));
};
// Test fixture that owns the four engine threads (platform, raster, UI, IO)
// and their task runners, and builds Settings from the compiled Dart fixture.
//
// Note: |thread_host_| is declared before |task_runners_| so the threads
// outlive the runners that reference them.
class EngineAnimatorTest : public testing::FixtureTest {
 public:
  EngineAnimatorTest()
      : thread_host_("EngineAnimatorTest",
                     ThreadHost::Type::kPlatform | ThreadHost::Type::kIo |
                         ThreadHost::Type::kUi | ThreadHost::Type::kRaster),
        task_runners_({
            "EngineAnimatorTest",
            thread_host_.platform_thread->GetTaskRunner(),  // platform
            thread_host_.raster_thread->GetTaskRunner(),    // raster
            thread_host_.ui_thread->GetTaskRunner(),        // ui
            thread_host_.io_thread->GetTaskRunner()         // io
        }) {}
  // Posts |function| to the UI task runner and blocks until it completes.
  void PostUITaskSync(const std::function<void()>& function) {
    fml::AutoResetWaitableEvent latch;
    task_runners_.GetUITaskRunner()->PostTask([&] {
      function();
      latch.Signal();
    });
    latch.Wait();
  }

 protected:
  void SetUp() override {
    settings_ = CreateSettingsForFixture();
    // The dispatcher maker returns nullptr; tests here do not exercise
    // pointer data dispatch.
    dispatcher_maker_ = [](PointerDataDispatcher::Delegate&) {
      return nullptr;
    };
  }
  MockDelegate delegate_;
  PointerDataDispatcherMaker dispatcher_maker_;
  ThreadHost thread_host_;
  TaskRunners task_runners_;
  Settings settings_;
  std::unique_ptr<Animator> animator_;
  fml::WeakPtr<IOManager> io_manager_;
  std::unique_ptr<RuntimeController> runtime_controller_;
  std::shared_ptr<fml::ConcurrentTaskRunner> image_decoder_task_runner_;
  fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate_;
};
// A class that can launch an Engine with the specified Engine::Delegate.
//
// To use this class, construct this class with Create, call Run, and use the
// engine with EngineTaskSync(). The Engine is created, used, and destroyed on
// the UI task runner, as the Engine API requires.
class EngineContext {
 public:
  using EngineCallback = std::function<void(Engine&)>;

  [[nodiscard]] static std::unique_ptr<EngineContext> Create(
      Engine::Delegate& delegate,      //
      Settings settings,               //
      const TaskRunners& task_runners, //
      std::unique_ptr<Animator> animator) {
    auto [vm, isolate_snapshot] = Shell::InferVmInitDataFromSettings(settings);
    FML_CHECK(vm) << "Must be able to initialize the VM.";
    // Construct the class with `new` because `make_unique` has no access to the
    // private constructor.
    EngineContext* raw_pointer =
        new EngineContext(delegate, settings, task_runners, std::move(animator),
                          vm, isolate_snapshot);
    return std::unique_ptr<EngineContext>(raw_pointer);
  }
  // Runs the Dart entrypoint described by |configuration| on the UI thread
  // and aborts the test process if the engine fails to launch it.
  void Run(RunConfiguration configuration) {
    PostSync(task_runners_.GetUITaskRunner(), [this, &configuration] {
      Engine::RunStatus run_status = engine_->Run(std::move(configuration));
      FML_CHECK(run_status == Engine::RunStatus::Success)
          << "Engine failed to run.";
      (void)run_status;  // Suppress unused-variable warning
    });
  }
  // Run a task that operates the Engine on the UI thread, and wait for the
  // task to end.
  //
  // If called on the UI thread, the task is executed synchronously.
  void EngineTaskSync(EngineCallback task) {
    ASSERT_TRUE(engine_);
    ASSERT_TRUE(task);
    auto runner = task_runners_.GetUITaskRunner();
    if (runner->RunsTasksOnCurrentThread()) {
      task(*engine_);
    } else {
      PostSync(task_runners_.GetUITaskRunner(), [&]() { task(*engine_); });
    }
  }
  // Tears down the Engine on the UI thread before the members of this class
  // are destroyed.
  ~EngineContext() {
    PostSync(task_runners_.GetUITaskRunner(), [this] { engine_.reset(); });
  }

 private:
  EngineContext(Engine::Delegate& delegate,  //
                Settings settings,           //
                const TaskRunners& task_runners,  //
                std::unique_ptr<Animator> animator,  //
                const DartVMRef& vm,         //
                fml::RefPtr<const DartSnapshot> isolate_snapshot)
      : task_runners_(task_runners), vm_(vm) {
    // The Engine must be constructed on the UI task runner.
    PostSync(task_runners.GetUITaskRunner(), [this, &settings, &animator,
                                              &delegate, &isolate_snapshot] {
      auto dispatcher_maker =
          [](DefaultPointerDataDispatcher::Delegate& delegate) {
            return std::make_unique<DefaultPointerDataDispatcher>(delegate);
          };
      engine_ = std::make_unique<Engine>(
          /*delegate=*/delegate,
          /*dispatcher_maker=*/dispatcher_maker,
          /*vm=*/*&vm_,
          /*isolate_snapshot=*/std::move(isolate_snapshot),
          /*task_runners=*/task_runners_,
          /*platform_data=*/PlatformData(),
          /*settings=*/settings,
          /*animator=*/std::move(animator),
          /*io_manager=*/io_manager_,
          /*unref_queue=*/nullptr,
          /*snapshot_delegate=*/snapshot_delegate_,
          /*volatile_path_tracker=*/nullptr,
          /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>());
    });
  }
  TaskRunners task_runners_;
  DartVMRef vm_;
  std::unique_ptr<Engine> engine_;
  fml::WeakPtr<IOManager> io_manager_;
  fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate_;
};
} // namespace
// Verifies that when the Dart frame renders two views within a single frame,
// the animator submits both layer trees to the pipeline in one FrameItem.
TEST_F(EngineAnimatorTest, AnimatorAcceptsMultipleRenders) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  fml::AutoResetWaitableEvent draw_latch;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(
          Invoke([&draw_latch](const std::shared_ptr<FramePipeline>& pipeline) {
            auto status =
                pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
                  // Both views rendered by the entrypoint must be present.
                  auto tasks = Sorted(item->layer_tree_tasks);
                  EXPECT_EQ(tasks.size(), 2u);
                  EXPECT_EQ(tasks[0]->view_id, 1);
                  EXPECT_EQ(tasks[1]->view_id, 2);
                });
            EXPECT_EQ(status, PipelineConsumeResult::Done);
            draw_latch.Signal();
          }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillOnce(Invoke([&engine_context](fml::TimePoint frame_target_time,
                                         uint64_t frame_number) {
        engine_context->EngineTaskSync([&](Engine& engine) {
          engine.BeginFrame(frame_target_time, frame_number);
        });
      }));
  native_latch.Reset();
  // Signaled by the Dart fixture once its main() has run.
  AddNativeCallback("NotifyNative", [](auto args) { native_latch.Signal(); });
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("onDrawFrameRenderAllViews");
  engine_context->Run(std::move(configuration));
  engine_context->EngineTaskSync([](Engine& engine) {
    engine.AddView(1, ViewportMetrics{1, 10, 10, 22, 0});
    engine.AddView(2, ViewportMetrics{1, 10, 10, 22, 0});
  });
  native_latch.Wait();
  engine_context->EngineTaskSync(
      [](Engine& engine) { engine.ScheduleFrame(); });
  draw_latch.Wait();
}
// Verifies that a view rendered outside of a frame (before BeginFrame) is
// dropped, while a view rendered within the frame is submitted.
TEST_F(EngineAnimatorTest, IgnoresOutOfFrameRenders) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  fml::AutoResetWaitableEvent draw_latch;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(
          Invoke([&draw_latch](const std::shared_ptr<FramePipeline>& pipeline) {
            auto status =
                pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
                  // View 1 is rendered before the frame, and is ignored.
                  // View 2 is rendered within the frame, and is accepted.
                  EXPECT_EQ(item->layer_tree_tasks.size(), 1u);
                  EXPECT_EQ(item->layer_tree_tasks[0]->view_id, 2);
                });
            EXPECT_EQ(status, PipelineConsumeResult::Done);
            draw_latch.Signal();
          }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillOnce(Invoke([&engine_context](fml::TimePoint frame_target_time,
                                         uint64_t frame_number) {
        engine_context->EngineTaskSync([&](Engine& engine) {
          engine.BeginFrame(frame_target_time, frame_number);
        });
      }));
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  engine_context->EngineTaskSync([](Engine& engine) {
    engine.AddView(1, ViewportMetrics{1, 10, 10, 22, 0});
    engine.AddView(2, ViewportMetrics{1, 10, 10, 22, 0});
  });
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("renderViewsInFrameAndOutOfFrame");
  engine_context->Run(std::move(configuration));
  draw_latch.Wait();
}
// Verifies that when the Dart code renders the same view twice in one frame,
// only one layer tree is submitted, and it is the first render (whose root
// layer is captured by the "CaptureRootLayer" callback for comparison).
TEST_F(EngineAnimatorTest, IgnoresDuplicateRenders) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  // Layers of the first (captured) scene; compared against the submitted
  // layer tree in OnAnimatorDraw.
  std::vector<std::shared_ptr<Layer>> benchmark_layers;
  auto capture_root_layer = [&benchmark_layers](Dart_NativeArguments args) {
    // Recover the SceneBuilder C++ peer from the Dart handle.
    auto handle = Dart_GetNativeArgument(args, 0);
    intptr_t peer = 0;
    Dart_Handle result = Dart_GetNativeInstanceField(
        handle, tonic::DartWrappable::kPeerIndex, &peer);
    ASSERT_FALSE(Dart_IsError(result));
    SceneBuilder* scene_builder = reinterpret_cast<SceneBuilder*>(peer);
    ASSERT_TRUE(scene_builder);
    std::shared_ptr<ContainerLayer> root_layer =
        scene_builder->layer_stack()[0];
    ASSERT_TRUE(root_layer);
    benchmark_layers = root_layer->layers();
  };
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  fml::AutoResetWaitableEvent draw_latch;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(Invoke([&draw_latch, &benchmark_layers](
                           const std::shared_ptr<FramePipeline>& pipeline) {
        auto status = pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
          EXPECT_EQ(item->layer_tree_tasks.size(), 1u);
          EXPECT_EQ(item->layer_tree_tasks[0]->view_id, kFlutterImplicitViewId);
          ContainerLayer* root_layer = reinterpret_cast<ContainerLayer*>(
              item->layer_tree_tasks[0]->layer_tree->root_layer());
          std::vector<std::shared_ptr<Layer>> result_layers =
              root_layer->layers();
          EXPECT_EQ(result_layers.size(), benchmark_layers.size());
          EXPECT_EQ(result_layers[0], benchmark_layers[0]);
        });
        EXPECT_EQ(status, PipelineConsumeResult::Done);
        draw_latch.Signal();
      }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillOnce(Invoke([&engine_context](fml::TimePoint frame_target_time,
                                         uint64_t frame_number) {
        engine_context->EngineTaskSync([&](Engine& engine) {
          engine.BeginFrame(frame_target_time, frame_number);
        });
      }));
  AddNativeCallback("CaptureRootLayer",
                    CREATE_NATIVE_ENTRY(capture_root_layer));
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  engine_context->EngineTaskSync([](Engine& engine) {
    engine.AddView(kFlutterImplicitViewId, ViewportMetrics{1, 10, 10, 22, 0});
  });
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("renderTwiceForOneView");
  engine_context->Run(std::move(configuration));
  draw_latch.Wait();
}
// Verifies that the implicit view's layer tree is submitted for
// rasterization before the Dart handleDrawFrame callback returns; the
// "NotifyNative" callback (fired at the end of handleDrawFrame) asserts that
// rasterization has already started by then.
TEST_F(EngineAnimatorTest, AnimatorSubmitsImplicitViewBeforeDrawFrameEnds) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  bool rasterization_started = false;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(Invoke([&rasterization_started](
                           const std::shared_ptr<FramePipeline>& pipeline) {
        rasterization_started = true;
        auto status = pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
          EXPECT_EQ(item->layer_tree_tasks.size(), 1u);
          EXPECT_EQ(item->layer_tree_tasks[0]->view_id, kFlutterImplicitViewId);
        });
        EXPECT_EQ(status, PipelineConsumeResult::Done);
      }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillRepeatedly(Invoke([&engine_context](fml::TimePoint frame_target_time,
                                               uint64_t frame_number) {
        engine_context->EngineTaskSync([&](Engine& engine) {
          engine.BeginFrame(frame_target_time, frame_number);
        });
      }));
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  native_latch.Reset();
  // The native_latch is signaled at the end of handleDrawFrame.
  AddNativeCallback("NotifyNative",
                    CREATE_NATIVE_ENTRY([&rasterization_started](auto args) {
                      EXPECT_EQ(rasterization_started, true);
                      native_latch.Signal();
                    }));
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  engine_context->EngineTaskSync([](Engine& engine) {
    engine.AddView(kFlutterImplicitViewId, ViewportMetrics{1.0, 10, 10, 1, 0});
  });
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("renderSingleViewAndCallAfterOnDrawFrame");
  engine_context->Run(std::move(configuration));
  native_latch.Wait();
}
// The animator should submit to the pipeline the implicit view rendered in a
// warm up frame if there's already a continuation (i.e. Animator::BeginFrame
// has been called).
TEST_F(EngineAnimatorTest, AnimatorSubmitWarmUpImplicitView) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  fml::AutoResetWaitableEvent continuation_ready_latch;
  fml::AutoResetWaitableEvent draw_latch;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(Invoke([&draw_latch](
                           const std::shared_ptr<FramePipeline>& pipeline) {
        auto status = pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
          EXPECT_EQ(item->layer_tree_tasks.size(), 1u);
          EXPECT_EQ(item->layer_tree_tasks[0]->view_id, kFlutterImplicitViewId);
        });
        EXPECT_EQ(status, PipelineConsumeResult::Done);
        draw_latch.Signal();
      }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillRepeatedly(
          Invoke([&engine_context, &continuation_ready_latch](
                     fml::TimePoint frame_target_time, uint64_t frame_number) {
            continuation_ready_latch.Signal();
            engine_context->EngineTaskSync([&](Engine& engine) {
              engine.BeginFrame(frame_target_time, frame_number);
            });
          }));
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  engine_context->EngineTaskSync([](Engine& engine) {
    // Schedule a frame to trigger Animator::BeginFrame to create a
    // continuation. The continuation needs to be available before `Engine::Run`
    // since the Dart program immediately schedules a warm up frame.
    engine.ScheduleFrame(true);
    // Add the implicit view so that the engine recognizes it and that its
    // metrics is not empty.
    engine.AddView(kFlutterImplicitViewId, ViewportMetrics{1.0, 10, 10, 1, 0});
  });
  continuation_ready_latch.Wait();
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("renderWarmUpImplicitView");
  engine_context->Run(std::move(configuration));
  draw_latch.Wait();
}
// The warm up frame should work if only some of the registered views are
// included.
//
// This test also verifies that the warm up frame can render multiple views.
// Three views (0, 1, 2) are registered; the Dart entrypoint only renders
// views 1 and 2, and only those must be submitted.
TEST_F(EngineAnimatorTest, AnimatorSubmitPartialViewsForWarmUp) {
  MockAnimatorDelegate animator_delegate;
  std::unique_ptr<EngineContext> engine_context;
  std::shared_ptr<PlatformMessageHandler> platform_message_handler =
      std::make_shared<MockPlatformMessageHandler>();
  EXPECT_CALL(delegate_, GetPlatformMessageHandler)
      .WillOnce(ReturnRef(platform_message_handler));
  fml::AutoResetWaitableEvent continuation_ready_latch;
  fml::AutoResetWaitableEvent draw_latch;
  EXPECT_CALL(animator_delegate, OnAnimatorDraw)
      .WillOnce(
          Invoke([&draw_latch](const std::shared_ptr<FramePipeline>& pipeline) {
            auto status =
                pipeline->Consume([&](std::unique_ptr<FrameItem> item) {
                  auto tasks = Sorted(item->layer_tree_tasks);
                  EXPECT_EQ(tasks.size(), 2u);
                  EXPECT_EQ(tasks[0]->view_id, 1);
                  EXPECT_EQ(tasks[1]->view_id, 2);
                });
            EXPECT_EQ(status, PipelineConsumeResult::Done);
            draw_latch.Signal();
          }));
  EXPECT_CALL(animator_delegate, OnAnimatorBeginFrame)
      .WillRepeatedly(
          Invoke([&engine_context, &continuation_ready_latch](
                     fml::TimePoint frame_target_time, uint64_t frame_number) {
            continuation_ready_latch.Signal();
            engine_context->EngineTaskSync([&](Engine& engine) {
              engine.BeginFrame(frame_target_time, frame_number);
            });
          }));
  std::unique_ptr<Animator> animator;
  PostSync(task_runners_.GetUITaskRunner(),
           [&animator, &animator_delegate, &task_runners = task_runners_] {
             animator = std::make_unique<Animator>(
                 animator_delegate, task_runners,
                 static_cast<std::unique_ptr<VsyncWaiter>>(
                     std::make_unique<testing::ConstantFiringVsyncWaiter>(
                         task_runners)));
           });
  engine_context = EngineContext::Create(delegate_, settings_, task_runners_,
                                         std::move(animator));
  engine_context->EngineTaskSync([](Engine& engine) {
    // Schedule a frame to make the animator create a continuation.
    engine.ScheduleFrame(true);
    // Add multiple views.
    engine.AddView(0, ViewportMetrics{1, 10, 10, 22, 0});
    engine.AddView(1, ViewportMetrics{1, 10, 10, 22, 0});
    engine.AddView(2, ViewportMetrics{1, 10, 10, 22, 0});
  });
  continuation_ready_latch.Wait();
  auto configuration = RunConfiguration::InferFromSettings(settings_);
  configuration.SetEntrypoint("renderWarmUpView1and2");
  engine_context->Run(std::move(configuration));
  draw_latch.Wait();
}
} // namespace flutter
// NOLINTEND(clang-analyzer-core.StackAddressEscape)
| engine/shell/common/engine_animator_unittests.cc/0 | {
"file_path": "engine/shell/common/engine_animator_unittests.cc",
"repo_id": "engine",
"token_count": 11083
} | 319 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_COMMON_RASTERIZER_H_
#define FLUTTER_SHELL_COMMON_RASTERIZER_H_
#include <memory>
#include <optional>
#include <unordered_map>
#include "flutter/common/settings.h"
#include "flutter/common/task_runners.h"
#include "flutter/display_list/image/dl_image.h"
#include "flutter/flow/compositor_context.h"
#include "flutter/flow/embedded_views.h"
#include "flutter/flow/frame_timings.h"
#include "flutter/flow/layers/layer_tree.h"
#include "flutter/flow/surface.h"
#include "flutter/fml/closure.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/raster_thread_merger.h"
#include "flutter/fml/synchronization/sync_switch.h"
#include "flutter/fml/synchronization/waitable_event.h"
#include "flutter/fml/time/time_delta.h"
#include "flutter/fml/time/time_point.h"
#if IMPELLER_SUPPORTS_RENDERING
#include "impeller/aiks/aiks_context.h" // nogncheck
#include "impeller/core/formats.h" // nogncheck
#include "impeller/renderer/context.h" // nogncheck
#include "impeller/typographer/backends/skia/typographer_context_skia.h" // nogncheck
#endif // IMPELLER_SUPPORTS_RENDERING
#include "flutter/lib/ui/snapshot_delegate.h"
#include "flutter/shell/common/pipeline.h"
#include "flutter/shell/common/snapshot_controller.h"
#include "flutter/shell/common/snapshot_surface_producer.h"
#include "third_party/skia/include/core/SkData.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
#if !IMPELLER_SUPPORTS_RENDERING
namespace impeller {
class Context;
class AiksContext;
} // namespace impeller
#endif // !IMPELLER_SUPPORTS_RENDERING
namespace flutter {
// The result status of Rasterizer::Draw. This is only used for unit tests.
//
// See also `DrawSurfaceStatus` for the per-view result of drawing.
enum class DrawStatus {
  // The drawing was done without any specified status.
  kDone,
  // Failed to rasterize the frame because the Rasterizer is not set up.
  kNotSetUp,
  // Nothing was done, because the call was not on the raster thread. Yielded to
  // let this frame be serviced on the right thread.
  kYielded,
  // Nothing was done, because the pipeline was empty.
  kPipelineEmpty,
  // Nothing was done, because the GPU was unavailable.
  kGpuUnavailable,
};
// The result status of drawing to a view. This is only used for unit tests.
//
// Unlike `DrawStatus`, which describes the outcome of the whole draw call,
// this describes the outcome for a single view's layer tree.
enum class DrawSurfaceStatus {
  // The layer tree was successfully rasterized.
  kSuccess,
  // The layer tree must be submitted again.
  //
  // This can occur on Android when switching the background surface to
  // FlutterImageView. On Android, the first frame doesn't make the image
  // available to the ImageReader right away. The second frame does.
  // TODO(egarciad): https://github.com/flutter/flutter/issues/65652
  //
  // This can also occur when the frame is dropped to wait for the thread
  // merger to merge the raster and platform threads.
  kRetry,
  // Failed to rasterize the frame.
  kFailed,
  // Layer tree was discarded because its size does not match the view size.
  // This typically occurs during resizing.
  kDiscarded,
};
// The information to draw to all views of a frame.
struct FrameItem {
  FrameItem(std::vector<std::unique_ptr<LayerTreeTask>> tasks,
            std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder)
      : layer_tree_tasks(std::move(tasks)),
        frame_timings_recorder(std::move(frame_timings_recorder)) {}
  // The layer trees to rasterize, one per view. Note: this list is not
  // guaranteed to be sorted by view ID.
  std::vector<std::unique_ptr<LayerTreeTask>> layer_tree_tasks;
  // Records timestamps for the phases of this frame's workload.
  std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder;
};

// The pipeline through which the UI thread submits frames to the rasterizer.
using FramePipeline = Pipeline<FrameItem>;
//------------------------------------------------------------------------------
/// The rasterizer is a component owned by the shell that resides on the raster
/// task runner. Each shell owns exactly one instance of a rasterizer. The
/// rasterizer may only be created, used and collected on the raster task
/// runner.
///
/// The rasterizer owns the instance of the currently active on-screen render
/// surface. On this surface, it renders the contents of layer trees submitted
/// to it by the `Engine` (which lives on the UI task runner).
///
/// The primary components owned by the rasterizer are the compositor context
/// and the on-screen render surface. The compositor context has all the GPU
/// state necessary to render frames to the render surface.
///
class Rasterizer final : public SnapshotDelegate,
public Stopwatch::RefreshRateUpdater,
public SnapshotController::Delegate {
public:
//----------------------------------------------------------------------------
/// @brief Used to forward events from the rasterizer to interested
/// subsystems. Currently, the shell sets itself up as the
/// rasterizer delegate to listen for frame rasterization events.
/// It can then forward these events to the engine.
///
/// Like all rasterizer operation, the rasterizer delegate call
/// are made on the raster task runner. Any delegate must ensure
/// that they can handle the threading implications.
///
class Delegate {
public:
//--------------------------------------------------------------------------
/// @brief Notifies the delegate that a frame has been rendered. The
/// rasterizer collects profiling information for each part of
/// the frame workload. This profiling information is made
/// available to the delegate for forwarding to subsystems
/// interested in collecting such profiles. Currently, the shell
/// (the delegate) forwards this to the engine where Dart code
/// can react to this information.
///
/// @see `FrameTiming`
///
/// @param[in] frame_timing Instrumentation information for each phase of
/// the frame workload.
///
virtual void OnFrameRasterized(const FrameTiming& frame_timing) = 0;
/// Time limit for a smooth frame.
///
/// See: `DisplayManager::GetMainDisplayRefreshRate`.
virtual fml::Milliseconds GetFrameBudget() = 0;
/// Target time for the latest frame. See also `Shell::OnAnimatorBeginFrame`
/// for when this time gets updated.
virtual fml::TimePoint GetLatestFrameTargetTime() const = 0;
/// Task runners used by the shell.
virtual const TaskRunners& GetTaskRunners() const = 0;
/// The raster thread merger from parent shell's rasterizer.
virtual const fml::RefPtr<fml::RasterThreadMerger>
GetParentRasterThreadMerger() const = 0;
/// Accessor for the shell's GPU sync switch, which determines whether GPU
/// operations are allowed on the current thread.
///
/// For example, on some platforms when the application is backgrounded it
/// is critical that GPU operations are not processed.
virtual std::shared_ptr<const fml::SyncSwitch> GetIsGpuDisabledSyncSwitch()
const = 0;
virtual const Settings& GetSettings() const = 0;
virtual bool ShouldDiscardLayerTree(int64_t view_id,
const flutter::LayerTree& tree) = 0;
};
//----------------------------------------------------------------------------
/// @brief How to handle calls to MakeSkiaGpuImage.
enum class MakeGpuImageBehavior {
/// MakeSkiaGpuImage returns a GPU resident image, if possible.
kGpu,
/// MakeSkiaGpuImage returns a checkerboard bitmap. This is useful in test
/// contexts where no GPU surface is available.
kBitmap,
};
//----------------------------------------------------------------------------
/// @brief Creates a new instance of a rasterizer. Rasterizers may only
/// be created on the raster task runner. Rasterizers are
/// currently only created by the shell (which also sets itself up
/// as the rasterizer delegate).
///
/// @param[in] delegate The rasterizer delegate.
/// @param[in] gpu_image_behavior How to handle calls to
/// MakeSkiaGpuImage.
///
explicit Rasterizer(
Delegate& delegate,
MakeGpuImageBehavior gpu_image_behavior = MakeGpuImageBehavior::kGpu);
//----------------------------------------------------------------------------
/// @brief Destroys the rasterizer. This must happen on the raster task
/// runner. All GPU resources are collected before this call
/// returns. Any context set up by the embedder to hold these
/// resources can be immediately collected as well.
///
~Rasterizer();
void SetImpellerContext(std::weak_ptr<impeller::Context> impeller_context);
//----------------------------------------------------------------------------
/// @brief Rasterizers may be created well before an on-screen surface is
/// available for rendering. Shells usually create a rasterizer in
/// their constructors. Once an on-screen surface is available
/// however, one may be provided to the rasterizer using this
/// call. No rendering may occur before this call. The surface is
/// held till the balancing call to `Rasterizer::Teardown` is
/// made. Calling a setup before tearing down the previous surface
/// (if this is not the first time the surface has been set up) is
/// user error.
///
/// @see `Rasterizer::Teardown`
///
/// @param[in] surface The on-screen render surface.
///
void Setup(std::unique_ptr<Surface> surface);
//----------------------------------------------------------------------------
/// @brief Releases the previously set up on-screen render surface and
/// collects associated resources. No more rendering may occur
/// till the next call to `Rasterizer::Setup` with a new render
/// surface. Calling a teardown without a setup is user error.
/// Calling this method multiple times is safe.
///
void Teardown();
//----------------------------------------------------------------------------
/// @brief Releases any resource used by the external view embedder.
/// For example, overlay surfaces or Android views.
  ///             On Android, this method posts a task to the platform thread,
/// and waits until it completes.
void TeardownExternalViewEmbedder();
//----------------------------------------------------------------------------
/// @brief Notifies the rasterizer that there is a low memory situation
/// and it must purge as many unnecessary resources as possible.
/// Currently, the Skia context associated with onscreen rendering
/// is told to free GPU resources.
///
void NotifyLowMemoryWarning() const;
//----------------------------------------------------------------------------
/// @brief Gets a weak pointer to the rasterizer. The rasterizer may only
/// be accessed on the raster task runner.
///
/// @return The weak pointer to the rasterizer.
///
fml::TaskRunnerAffineWeakPtr<Rasterizer> GetWeakPtr() const;
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> GetSnapshotDelegate() const;
//----------------------------------------------------------------------------
/// @brief Deallocate the resources for displaying a view.
///
/// This method must be called when a view is removed.
///
  ///             The rasterizer doesn't need views to be registered. Last-frame
/// states for views are recorded when layer trees are rasterized
/// to the view and used during `Rasterizer::DrawLastLayerTrees`.
///
/// @param[in] view_id The ID of the view.
///
void CollectView(int64_t view_id);
//----------------------------------------------------------------------------
/// @brief Returns the last successfully drawn layer tree for the given
/// view, or nullptr if there isn't any. This is useful during
/// `DrawLastLayerTrees` and computing frame damage.
///
/// @bug https://github.com/flutter/flutter/issues/33939
///
/// @return A pointer to the last layer or `nullptr` if this rasterizer
/// has never rendered a frame to the given view.
///
flutter::LayerTree* GetLastLayerTree(int64_t view_id);
//----------------------------------------------------------------------------
/// @brief Draws the last layer trees with their last configuration. This
/// may seem entirely redundant at first glance. After all, on
/// surface loss and re-acquisition, the framework generates a new
/// layer tree. Otherwise, why render the same contents to the
/// screen again? This is used as an optimization in cases where
/// there are external textures (video or camera streams for
  ///             example) referenced in the layer tree. These textures may
/// be updated at a cadence different from that of the Flutter
/// application. Flutter can re-render the layer tree with just
/// the updated textures instead of waiting for the framework to
/// do the work to generate the layer tree describing the same
/// contents.
///
/// Calling this method clears all last layer trees
/// (GetLastLayerTree).
///
void DrawLastLayerTrees(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder);
// |SnapshotDelegate|
GrDirectContext* GetGrContext() override;
std::shared_ptr<flutter::TextureRegistry> GetTextureRegistry() override;
//----------------------------------------------------------------------------
/// @brief Takes the next item from the layer tree pipeline and executes
/// the raster thread frame workload for that pipeline item to
/// render a frame on the on-screen surface.
///
/// Why does the draw call take a layer tree pipeline and not the
/// layer tree directly?
///
/// The pipeline is the way book-keeping of frame workloads
/// distributed across the multiple threads is managed. The
/// rasterizer deals with the pipelines directly (instead of layer
/// trees which is what it actually renders) because the pipeline
/// consumer's workload must be accounted for within the pipeline
/// itself. If the rasterizer took the layer tree directly, it
/// would have to be taken out of the pipeline. That would signal
/// the end of the frame workload and the pipeline would be ready
/// for new frames. But the last frame has not been rendered by
/// the frame yet! On the other hand, the pipeline must own the
/// layer tree it renders because it keeps a reference to the last
/// layer tree around till a new frame is rendered. So a simple
  ///             reference won't work either. The `Rasterizer::DoDraw` method
/// actually performs the GPU operations within the layer tree
/// pipeline.
///
/// @see `Rasterizer::DoDraw`
///
/// @param[in] pipeline The layer tree pipeline to take the next layer tree
/// to render from.
///
DrawStatus Draw(const std::shared_ptr<FramePipeline>& pipeline);
//----------------------------------------------------------------------------
/// @brief The type of the screenshot to obtain of the previously
/// rendered layer tree.
///
enum class ScreenshotType {
// NOLINTBEGIN(readability-identifier-naming)
//--------------------------------------------------------------------------
/// A format used to denote a Skia picture. A Skia picture is a serialized
/// representation of an `SkPicture` that can be used to introspect the
/// series of commands used to draw that picture.
///
/// Skia pictures are typically stored as files with the .skp extension on
/// disk. These files may be viewed in an interactive debugger available at
/// https://debugger.skia.org/
///
SkiaPicture,
//--------------------------------------------------------------------------
/// A format used to denote uncompressed image data. For Skia, this format
/// is 32 bits per pixel, 8 bits per component and
    /// denoted by the `kN32_SkColorType` Skia color type. For Impeller, its
/// format is specified in Screenshot::pixel_format.
///
UncompressedImage,
//--------------------------------------------------------------------------
/// A format used to denote compressed image data. The PNG compressed
/// container is used.
///
CompressedImage,
//--------------------------------------------------------------------------
/// Reads the data directly from the Rasterizer's surface. The pixel format
/// is determined from the surface. This is the only way to read wide gamut
/// color data, but isn't supported everywhere.
SurfaceData,
// NOLINTEND(readability-identifier-naming)
};
// Specifies the format of pixel data in a Screenshot.
enum class ScreenshotFormat {
// Unknown format, or Skia default.
kUnknown,
// RGBA 8 bits per channel.
kR8G8B8A8UNormInt,
// BGRA 8 bits per channel.
kB8G8R8A8UNormInt,
// RGBA 16 bit floating point per channel.
kR16G16B16A16Float,
};
//----------------------------------------------------------------------------
/// @brief A POD type used to return the screenshot data along with the
/// size of the frame.
///
struct Screenshot {
//--------------------------------------------------------------------------
/// The data used to describe the screenshot. The data format depends on the
/// type of screenshot taken and any further encoding done to the same.
///
/// @see `ScreenshotType`
///
sk_sp<SkData> data;
//--------------------------------------------------------------------------
/// The size of the screenshot in texels.
///
SkISize frame_size = SkISize::MakeEmpty();
//--------------------------------------------------------------------------
/// Characterization of the format of the data in `data`.
///
std::string format;
//--------------------------------------------------------------------------
/// The pixel format of the data in `data`.
///
/// If the impeller backend is not used, this value is always kUnknown and
/// the data is in RGBA8888 format.
ScreenshotFormat pixel_format = ScreenshotFormat::kUnknown;
//--------------------------------------------------------------------------
/// @brief Creates an empty screenshot
///
Screenshot();
//--------------------------------------------------------------------------
/// @brief Creates a screenshot with the specified data and size.
///
/// @param[in] p_data The screenshot data
/// @param[in] p_size The screenshot size.
/// @param[in] p_format The screenshot format.
    ///  @param[in]  p_pixel_format  The pixel format of the screenshot data.
///
Screenshot(sk_sp<SkData> p_data,
SkISize p_size,
const std::string& p_format,
ScreenshotFormat p_pixel_format);
//--------------------------------------------------------------------------
/// @brief The copy constructor for a screenshot.
///
/// @param[in] other The screenshot to copy from.
///
Screenshot(const Screenshot& other);
//--------------------------------------------------------------------------
/// @brief Destroys the screenshot object and releases underlying data.
///
~Screenshot();
};
//----------------------------------------------------------------------------
/// @brief Screenshots the last layer tree to one of the supported
/// screenshot types and optionally Base 64 encodes that data for
/// easier transmission and packaging (usually over the service
/// protocol for instrumentation tools running on the host).
///
/// @param[in] type The type of the screenshot to gather.
/// @param[in] base64_encode Whether Base 64 encoding must be applied to the
/// data after a screenshot has been captured.
///
/// @return A non-empty screenshot if one could be captured. A screenshot
/// capture may fail if there were no layer trees previously
/// rendered by this rasterizer, or, due to an unspecified
/// internal error. Internal error will be logged to the console.
///
Screenshot ScreenshotLastLayerTree(ScreenshotType type, bool base64_encode);
//----------------------------------------------------------------------------
/// @brief Sets a callback that will be executed when the next layer tree
  ///             is rendered to the on-screen surface. This is used by
/// embedders to listen for one time operations like listening for
/// when the first frame is rendered so that they may hide splash
/// screens.
///
/// The callback is only executed once and dropped on the GPU
/// thread when executed (lambda captures must be able to deal
/// with the threading repercussions of this behavior).
///
/// @param[in] callback The callback to execute when the next layer tree is
/// rendered on-screen.
///
void SetNextFrameCallback(const fml::closure& callback);
//----------------------------------------------------------------------------
/// @brief Set the External View Embedder. This is done on shell
/// initialization. This is non-null on platforms that support
/// embedding externally composited views.
///
/// @param[in] view_embedder The external view embedder object.
///
void SetExternalViewEmbedder(
const std::shared_ptr<ExternalViewEmbedder>& view_embedder);
//----------------------------------------------------------------------------
/// @brief Set the snapshot surface producer. This is done on shell
/// initialization. This is non-null on platforms that support taking
/// GPU accelerated raster snapshots in the background.
///
/// @param[in] producer A surface producer for raster snapshotting when the
/// onscreen surface is not available.
///
void SetSnapshotSurfaceProducer(
std::unique_ptr<SnapshotSurfaceProducer> producer);
//----------------------------------------------------------------------------
/// @brief Returns a pointer to the compositor context used by this
/// rasterizer. This pointer will never be `nullptr`.
///
/// @return The compositor context used by this rasterizer.
///
flutter::CompositorContext* compositor_context() {
return compositor_context_.get();
}
//----------------------------------------------------------------------------
/// @brief Returns the raster thread merger used by this rasterizer.
/// This may be `nullptr`.
///
/// @return The raster thread merger used by this rasterizer.
///
fml::RefPtr<fml::RasterThreadMerger> GetRasterThreadMerger();
//----------------------------------------------------------------------------
/// @brief Skia has no notion of time. To work around the performance
/// implications of this, it may cache GPU resources to reference
/// them from one frame to the next. Using this call, embedders
/// may set the maximum bytes cached by Skia in its caches
/// dedicated to on-screen rendering.
///
/// @attention This cache setting will be invalidated when the surface is
/// torn down via `Rasterizer::Teardown`. This call must be made
/// again with new limits after surface re-acquisition.
///
/// @attention This cache does not describe the entirety of GPU resources
/// that may be cached. The `RasterCache` also holds very large
/// GPU resources.
///
/// @see `RasterCache`
///
/// @param[in] max_bytes The maximum byte size of resource that may be
/// cached for GPU rendering.
/// @param[in] from_user Whether this request was from user code, e.g. via
/// the flutter/skia message channel, in which case
/// it should not be overridden by the platform.
///
void SetResourceCacheMaxBytes(size_t max_bytes, bool from_user);
//----------------------------------------------------------------------------
/// @brief The current value of Skia's resource cache size, if a surface
/// is present.
///
/// @attention This cache does not describe the entirety of GPU resources
/// that may be cached. The `RasterCache` also holds very large
/// GPU resources.
///
/// @see `RasterCache`
///
/// @return The size of Skia's resource cache, if available.
///
std::optional<size_t> GetResourceCacheMaxBytes() const;
//----------------------------------------------------------------------------
/// @brief Enables the thread merger if the external view embedder
/// supports dynamic thread merging.
///
/// @attention This method is thread-safe. When the thread merger is enabled,
/// the raster task queue can run in the platform thread at any
/// time.
///
/// @see `ExternalViewEmbedder`
///
void EnableThreadMergerIfNeeded();
//----------------------------------------------------------------------------
/// @brief Disables the thread merger if the external view embedder
/// supports dynamic thread merging.
///
/// @attention This method is thread-safe. When the thread merger is
/// disabled, the raster task queue will continue to run in the
/// same thread until |EnableThreadMergerIfNeeded| is called.
///
/// @see `ExternalViewEmbedder`
///
void DisableThreadMergerIfNeeded();
//----------------------------------------------------------------------------
/// @brief Returns whether TearDown has been called.
///
/// This method is used only in unit tests.
///
bool IsTornDown();
//----------------------------------------------------------------------------
/// @brief Returns the last status of drawing the specific view.
///
/// This method is used only in unit tests.
///
std::optional<DrawSurfaceStatus> GetLastDrawStatus(int64_t view_id);
private:
// The result status of DoDraw, DrawToSurfaces, and DrawToSurfacesUnsafe.
enum class DoDrawStatus {
// The drawing was done without any specified status.
kDone,
// Frame has been successfully rasterized, but there are additional items
// in the pipeline waiting to be consumed. This is currently only used when
// thread configuration change occurs.
kEnqueuePipeline,
// Failed to rasterize the frame because the Rasterizer is not set up.
kNotSetUp,
// Nothing was done, because GPU was unavailable.
kGpuUnavailable,
};
// The result of DoDraw.
struct DoDrawResult {
// The overall status of the drawing process.
//
// The status of drawing a specific view is available at GetLastDrawStatus.
DoDrawStatus status = DoDrawStatus::kDone;
// The frame item that needs to be submitted again.
//
// See RasterStatus::kResubmit and kSkipAndRetry for when it happens.
//
// If `resubmitted_item` is not null, its `tasks` is guaranteed to be
// non-empty.
std::unique_ptr<FrameItem> resubmitted_item;
};
struct ViewRecord {
std::unique_ptr<LayerTreeTask> last_successful_task;
std::optional<DrawSurfaceStatus> last_draw_status;
};
// |SnapshotDelegate|
std::unique_ptr<GpuImageResult> MakeSkiaGpuImage(
sk_sp<DisplayList> display_list,
const SkImageInfo& image_info) override;
// |SnapshotDelegate|
sk_sp<DlImage> MakeRasterSnapshot(sk_sp<DisplayList> display_list,
SkISize picture_size) override;
// |SnapshotDelegate|
sk_sp<SkImage> ConvertToRasterImage(sk_sp<SkImage> image) override;
// |Stopwatch::Delegate|
/// Time limit for a smooth frame.
///
/// See: `DisplayManager::GetMainDisplayRefreshRate`.
fml::Milliseconds GetFrameBudget() const override;
// |SnapshotController::Delegate|
const std::unique_ptr<Surface>& GetSurface() const override {
return surface_;
}
// |SnapshotController::Delegate|
std::shared_ptr<impeller::AiksContext> GetAiksContext() const override {
#if IMPELLER_SUPPORTS_RENDERING
if (surface_) {
return surface_->GetAiksContext();
}
if (auto context = impeller_context_.lock()) {
return std::make_shared<impeller::AiksContext>(
context, impeller::TypographerContextSkia::Make());
}
#endif
return nullptr;
}
// |SnapshotController::Delegate|
const std::unique_ptr<SnapshotSurfaceProducer>& GetSnapshotSurfaceProducer()
const override {
return snapshot_surface_producer_;
}
// |SnapshotController::Delegate|
std::shared_ptr<const fml::SyncSwitch> GetIsGpuDisabledSyncSwitch()
const override {
return delegate_.GetIsGpuDisabledSyncSwitch();
}
std::pair<sk_sp<SkData>, ScreenshotFormat> ScreenshotLayerTreeAsImage(
flutter::LayerTree* tree,
flutter::CompositorContext& compositor_context,
bool compressed);
// This method starts with the frame timing recorder at build end. This
// method might push it to raster end and get the recorded time, or abort in
// the middle and not get the recorded time.
DoDrawResult DoDraw(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks);
// This method pushes the frame timing recorder from build end to raster end.
DoDrawResult DrawToSurfaces(
FrameTimingsRecorder& frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks);
// Draws the specified layer trees to views, assuming we have access to the
// GPU.
//
// If any layer trees need resubmitting, this method returns the frame item to
// be resubmitted. Otherwise, it returns nullptr.
//
// Unsafe because it assumes we have access to the GPU which isn't the case
// when iOS is backgrounded, for example.
//
// This method pushes the frame timing recorder from build end to raster end.
std::unique_ptr<FrameItem> DrawToSurfacesUnsafe(
FrameTimingsRecorder& frame_timings_recorder,
std::vector<std::unique_ptr<LayerTreeTask>> tasks);
// Draws the layer tree to the specified view, assuming we have access to the
// GPU.
//
// This method is not affiliated with the frame timing recorder, but must be
// included between the RasterStart and RasterEnd.
DrawSurfaceStatus DrawToSurfaceUnsafe(
int64_t view_id,
flutter::LayerTree& layer_tree,
float device_pixel_ratio,
std::optional<fml::TimePoint> presentation_time);
ViewRecord& EnsureViewRecord(int64_t view_id);
void FireNextFrameCallbackIfPresent();
static bool ShouldResubmitFrame(const DoDrawResult& result);
static DrawStatus ToDrawStatus(DoDrawStatus status);
bool is_torn_down_ = false;
Delegate& delegate_;
MakeGpuImageBehavior gpu_image_behavior_;
std::weak_ptr<impeller::Context> impeller_context_;
std::unique_ptr<Surface> surface_;
std::unique_ptr<SnapshotSurfaceProducer> snapshot_surface_producer_;
std::unique_ptr<flutter::CompositorContext> compositor_context_;
std::unordered_map<int64_t, ViewRecord> view_records_;
fml::closure next_frame_callback_;
bool user_override_resource_cache_bytes_ = false;
std::optional<size_t> max_cache_bytes_;
fml::RefPtr<fml::RasterThreadMerger> raster_thread_merger_;
std::shared_ptr<ExternalViewEmbedder> external_view_embedder_;
std::unique_ptr<SnapshotController> snapshot_controller_;
// WeakPtrFactory must be the last member.
fml::TaskRunnerAffineWeakPtrFactory<Rasterizer> weak_factory_;
FML_DISALLOW_COPY_AND_ASSIGN(Rasterizer);
};
} // namespace flutter
#endif // FLUTTER_SHELL_COMMON_RASTERIZER_H_
| engine/shell/common/rasterizer.h/0 | {
"file_path": "engine/shell/common/rasterizer.h",
"repo_id": "engine",
"token_count": 10613
} | 320 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define FML_USED_ON_EMBEDDER
#include "flutter/shell/common/shell_test.h"
#include "flutter/flow/frame_timings.h"
#include "flutter/flow/layers/layer_tree.h"
#include "flutter/flow/layers/transform_layer.h"
#include "flutter/fml/build_config.h"
#include "flutter/fml/make_copyable.h"
#include "flutter/fml/mapping.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/shell/common/shell_test_platform_view.h"
#include "flutter/shell/common/vsync_waiter_fallback.h"
#include "flutter/testing/testing.h"
namespace flutter {
namespace testing {
constexpr int64_t kImplicitViewId = 0;
FrameContent ViewContent::NoViews() {
return std::map<int64_t, ViewContent>();
}
FrameContent ViewContent::DummyView(double width, double height) {
FrameContent result;
result[kImplicitViewId] = ViewContent{
.viewport_metrics = {1.0, width, height, 22, 0},
.builder = {},
};
return result;
}
FrameContent ViewContent::DummyView(flutter::ViewportMetrics viewport_metrics) {
FrameContent result;
result[kImplicitViewId] = ViewContent{
.viewport_metrics = std::move(viewport_metrics),
.builder = {},
};
return result;
}
FrameContent ViewContent::ImplicitView(double width,
double height,
LayerTreeBuilder builder) {
FrameContent result;
result[kImplicitViewId] = ViewContent{
.viewport_metrics = {1.0, width, height, 22, 0},
.builder = std::move(builder),
};
return result;
}
ShellTest::ShellTest()
: thread_host_("io.flutter.test." + GetCurrentTestName() + ".",
ThreadHost::Type::kPlatform | ThreadHost::Type::kIo |
ThreadHost::Type::kUi | ThreadHost::Type::kRaster) {}
void ShellTest::SendPlatformMessage(Shell* shell,
std::unique_ptr<PlatformMessage> message) {
shell->OnPlatformViewDispatchPlatformMessage(std::move(message));
}
void ShellTest::SendEnginePlatformMessage(
Shell* shell,
std::unique_ptr<PlatformMessage> message) {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetPlatformTaskRunner(),
fml::MakeCopyable(
[shell, &latch, message = std::move(message)]() mutable {
if (auto engine = shell->weak_engine_) {
engine->HandlePlatformMessage(std::move(message));
}
latch.Signal();
}));
latch.Wait();
}
void ShellTest::PlatformViewNotifyCreated(Shell* shell) {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetPlatformTaskRunner(), [shell, &latch]() {
shell->GetPlatformView()->NotifyCreated();
latch.Signal();
});
latch.Wait();
}
void ShellTest::PlatformViewNotifyDestroyed(Shell* shell) {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetPlatformTaskRunner(), [shell, &latch]() {
shell->GetPlatformView()->NotifyDestroyed();
latch.Signal();
});
latch.Wait();
}
void ShellTest::RunEngine(Shell* shell, RunConfiguration configuration) {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetPlatformTaskRunner(),
[shell, &latch, &configuration]() {
shell->RunEngine(std::move(configuration),
[&latch](Engine::RunStatus run_status) {
ASSERT_EQ(run_status, Engine::RunStatus::Success);
latch.Signal();
});
});
latch.Wait();
}
void ShellTest::RestartEngine(Shell* shell, RunConfiguration configuration) {
std::promise<bool> restarted;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetUITaskRunner(),
[shell, &restarted, &configuration]() {
restarted.set_value(shell->engine_->Restart(std::move(configuration)));
});
ASSERT_TRUE(restarted.get_future().get());
}
void ShellTest::VSyncFlush(Shell* shell, bool* will_draw_new_frame) {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetPlatformTaskRunner(),
[shell, will_draw_new_frame, &latch] {
// The following UI task ensures that all previous UI tasks are flushed.
fml::AutoResetWaitableEvent ui_latch;
shell->GetTaskRunners().GetUITaskRunner()->PostTask(
[&ui_latch, will_draw_new_frame]() {
if (will_draw_new_frame != nullptr) {
*will_draw_new_frame = true;
}
ui_latch.Signal();
});
ShellTestPlatformView* test_platform_view =
static_cast<ShellTestPlatformView*>(shell->GetPlatformView().get());
do {
test_platform_view->SimulateVSync();
} while (ui_latch.WaitWithTimeout(fml::TimeDelta::FromMilliseconds(1)));
latch.Signal();
});
latch.Wait();
}
void ShellTest::SetViewportMetrics(Shell* shell, double width, double height) {
flutter::ViewportMetrics viewport_metrics = {
1, // device pixel ratio
width, // physical width
height, // physical height
0, // padding top
0, // padding right
0, // padding bottom
0, // padding left
0, // view inset top
0, // view inset right
0, // view inset bottom
0, // view inset left
0, // gesture inset top
0, // gesture inset right
0, // gesture inset bottom
0, // gesture inset left
22, // physical touch slop
std::vector<double>(), // display features bounds
std::vector<int>(), // display features type
std::vector<int>(), // display features state
0 // Display ID
};
// Set viewport to nonempty, and call Animator::BeginFrame to make the layer
// tree pipeline nonempty. Without either of this, the layer tree below
// won't be rasterized.
fml::AutoResetWaitableEvent latch;
shell->GetTaskRunners().GetUITaskRunner()->PostTask(
[&latch, engine = shell->weak_engine_, viewport_metrics]() {
if (engine) {
engine->SetViewportMetrics(kImplicitViewId, viewport_metrics);
const auto frame_begin_time = fml::TimePoint::Now();
const auto frame_end_time =
frame_begin_time + fml::TimeDelta::FromSecondsF(1.0 / 60.0);
std::unique_ptr<FrameTimingsRecorder> recorder =
std::make_unique<FrameTimingsRecorder>();
recorder->RecordVsync(frame_begin_time, frame_end_time);
engine->animator_->BeginFrame(std::move(recorder));
engine->animator_->EndFrame();
}
latch.Signal();
});
latch.Wait();
}
void ShellTest::NotifyIdle(Shell* shell, fml::TimeDelta deadline) {
fml::AutoResetWaitableEvent latch;
shell->GetTaskRunners().GetUITaskRunner()->PostTask(
[&latch, engine = shell->weak_engine_, deadline]() {
if (engine) {
engine->NotifyIdle(deadline);
}
latch.Signal();
});
latch.Wait();
}
void ShellTest::PumpOneFrame(Shell* shell) {
PumpOneFrame(shell, ViewContent::DummyView());
}
void ShellTest::PumpOneFrame(Shell* shell, FrameContent frame_content) {
// Set viewport to nonempty, and call Animator::BeginFrame to make the layer
// tree pipeline nonempty. Without either of this, the layer tree below
// won't be rasterized.
fml::AutoResetWaitableEvent latch;
fml::WeakPtr<RuntimeDelegate> runtime_delegate = shell->weak_engine_;
shell->GetTaskRunners().GetUITaskRunner()->PostTask(
[&latch, engine = shell->weak_engine_, &frame_content,
runtime_delegate]() {
for (auto& [view_id, view_content] : frame_content) {
engine->SetViewportMetrics(view_id, view_content.viewport_metrics);
}
const auto frame_begin_time = fml::TimePoint::Now();
const auto frame_end_time =
frame_begin_time + fml::TimeDelta::FromSecondsF(1.0 / 60.0);
std::unique_ptr<FrameTimingsRecorder> recorder =
std::make_unique<FrameTimingsRecorder>();
recorder->RecordVsync(frame_begin_time, frame_end_time);
engine->animator_->BeginFrame(std::move(recorder));
// The BeginFrame phase and the EndFrame phase must be performed in a
// single task, otherwise a normal vsync might be inserted in between,
// causing flaky assertion errors.
for (auto& [view_id, view_content] : frame_content) {
SkMatrix identity;
identity.setIdentity();
auto root_layer = std::make_shared<TransformLayer>(identity);
auto layer_tree = std::make_unique<LayerTree>(
LayerTree::Config{.root_layer = root_layer},
SkISize::Make(view_content.viewport_metrics.physical_width,
view_content.viewport_metrics.physical_height));
float device_pixel_ratio = static_cast<float>(
view_content.viewport_metrics.device_pixel_ratio);
if (view_content.builder) {
view_content.builder(root_layer);
}
runtime_delegate->Render(view_id, std::move(layer_tree),
device_pixel_ratio);
}
engine->animator_->EndFrame();
latch.Signal();
});
latch.Wait();
}
void ShellTest::DispatchFakePointerData(Shell* shell) {
auto packet = std::make_unique<PointerDataPacket>(1);
DispatchPointerData(shell, std::move(packet));
}
void ShellTest::DispatchPointerData(Shell* shell,
std::unique_ptr<PointerDataPacket> packet) {
fml::AutoResetWaitableEvent latch;
shell->GetTaskRunners().GetPlatformTaskRunner()->PostTask(
[&latch, shell, &packet]() {
// Goes through PlatformView to ensure packet is corrected converted.
shell->GetPlatformView()->DispatchPointerDataPacket(std::move(packet));
latch.Signal();
});
latch.Wait();
}
int ShellTest::UnreportedTimingsCount(Shell* shell) {
return shell->unreported_timings_.size();
}
void ShellTest::SetNeedsReportTimings(Shell* shell, bool value) {
shell->SetNeedsReportTimings(value);
}
bool ShellTest::GetNeedsReportTimings(Shell* shell) {
return shell->needs_report_timings_;
}
void ShellTest::StorePersistentCache(PersistentCache* cache,
const SkData& key,
const SkData& value) {
cache->store(key, value);
}
void ShellTest::OnServiceProtocol(
Shell* shell,
ServiceProtocolEnum some_protocol,
const fml::RefPtr<fml::TaskRunner>& task_runner,
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
std::promise<bool> finished;
fml::TaskRunner::RunNowOrPostTask(task_runner, [shell, some_protocol, params,
response, &finished]() {
switch (some_protocol) {
case ServiceProtocolEnum::kGetSkSLs:
shell->OnServiceProtocolGetSkSLs(params, response);
break;
case ServiceProtocolEnum::kEstimateRasterCacheMemory:
shell->OnServiceProtocolEstimateRasterCacheMemory(params, response);
break;
case ServiceProtocolEnum::kSetAssetBundlePath:
shell->OnServiceProtocolSetAssetBundlePath(params, response);
break;
case ServiceProtocolEnum::kRunInView:
shell->OnServiceProtocolRunInView(params, response);
break;
case ServiceProtocolEnum::kRenderFrameWithRasterStats:
shell->OnServiceProtocolRenderFrameWithRasterStats(params, response);
break;
}
finished.set_value(true);
});
finished.get_future().wait();
}
std::shared_ptr<txt::FontCollection> ShellTest::GetFontCollection(
Shell* shell) {
return shell->weak_engine_->GetFontCollection().GetFontCollection();
}
// Builds the Settings used by shell test fixtures:
//  - leak_vm = false so the VM is actually shut down between tests;
//  - task observers are wired to the current thread's fml::MessageLoop;
//  - the isolate-create callback installs the fixture's native resolver so
//    Dart fixtures can call back into C++ test code.
Settings ShellTest::CreateSettingsForFixture() {
  Settings settings;
  settings.leak_vm = false;
  settings.task_observer_add = [](intptr_t key, const fml::closure& handler) {
    fml::MessageLoop::GetCurrent().AddTaskObserver(key, handler);
  };
  settings.task_observer_remove = [](intptr_t key) {
    fml::MessageLoop::GetCurrent().RemoveTaskObserver(key);
  };
  // Captures |this|: the returned Settings must not outlive the fixture.
  settings.isolate_create_callback = [this]() {
    native_resolver_->SetNativeResolverForIsolate();
  };
#if OS_FUCHSIA
  settings.verbose_logging = true;
#endif
  SetSnapshotsAndAssets(settings);
  return settings;
}
// Bundles the fixture's four dedicated threads into a TaskRunners set
// labeled "test", in the order TaskRunners expects.
TaskRunners ShellTest::GetTaskRunnersForFixture() {
  return {
      "test",
      thread_host_.platform_thread->GetTaskRunner(),  // platform
      thread_host_.raster_thread->GetTaskRunner(),    // raster
      thread_host_.ui_thread->GetTaskRunner(),        // ui
      thread_host_.io_thread->GetTaskRunner()         // io
  };
}
// Test-only forwarder exposing the shell's latest frame target time.
fml::TimePoint ShellTest::GetLatestFrameTargetTime(Shell* shell) const {
  return shell->GetLatestFrameTargetTime();
}
// Convenience overload: wraps the arguments in a Config and delegates to
// the Config-based CreateShell below.
std::unique_ptr<Shell> ShellTest::CreateShell(
    const Settings& settings,
    std::optional<TaskRunners> task_runners) {
  return CreateShell({
      .settings = settings,
      .task_runners = std::move(task_runners),
  });
}
std::unique_ptr<Shell> ShellTest::CreateShell(const Config& config) {
TaskRunners task_runners = config.task_runners.has_value()
? config.task_runners.value()
: GetTaskRunnersForFixture();
Shell::CreateCallback<PlatformView> platform_view_create_callback =
config.platform_view_create_callback;
if (!platform_view_create_callback) {
platform_view_create_callback = ShellTestPlatformViewBuilder({});
}
Shell::CreateCallback<Rasterizer> rasterizer_create_callback =
[](Shell& shell) { return std::make_unique<Rasterizer>(shell); };
return Shell::Create(flutter::PlatformData(), //
task_runners, //
config.settings, //
platform_view_create_callback, //
rasterizer_create_callback, //
config.is_gpu_disabled //
);
}
// Convenience overload: destroys |shell| using the fixture's default task
// runners.
void ShellTest::DestroyShell(std::unique_ptr<Shell> shell) {
  DestroyShell(std::move(shell), GetTaskRunnersForFixture());
}
// Destroys |shell| on the platform task runner and blocks the calling
// thread until destruction has completed.
//
// The shell is reset on the platform thread (via RunNowOrPostTask, which
// may run inline if already on that thread); the latch guarantees |shell|
// and |latch| outlive the posted task.
void ShellTest::DestroyShell(std::unique_ptr<Shell> shell,
                             const TaskRunners& task_runners) {
  fml::AutoResetWaitableEvent latch;
  fml::TaskRunner::RunNowOrPostTask(task_runners.GetPlatformTaskRunner(),
                                    [&shell, &latch]() mutable {
                                      shell.reset();
                                      latch.Signal();
                                    });
  latch.Wait();
}
size_t ShellTest::GetLiveTrackedPathCount(
const std::shared_ptr<VolatilePathTracker>& tracker) {
return std::count_if(
tracker->paths_.begin(), tracker->paths_.end(),
[](const std::weak_ptr<VolatilePathTracker::TrackedPath>& path) {
return path.lock();
});
}
// Sets the shell's GPU-disabled sync switch to |value|.
//
// NOTE(review): despite the name, passing value=false turns the GPU back
// ON (the switch tracks "is GPU disabled").
void ShellTest::TurnOffGPU(Shell* shell, bool value) {
  shell->is_gpu_disabled_sync_switch_->SetSwitch(value);
}
} // namespace testing
} // namespace flutter
| engine/shell/common/shell_test.cc/0 | {
"file_path": "engine/shell/common/shell_test.cc",
"repo_id": "engine",
"token_count": 6684
} | 321 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_COMMON_SNAPSHOT_CONTROLLER_H_
#define FLUTTER_SHELL_COMMON_SNAPSHOT_CONTROLLER_H_
#include "flutter/common/settings.h"
#include "flutter/display_list/image/dl_image.h"
#include "flutter/flow/surface.h"
#include "flutter/fml/synchronization/sync_switch.h"
#include "flutter/lib/ui/snapshot_delegate.h"
#include "flutter/shell/common/snapshot_surface_producer.h"
namespace impeller {
class AiksContext;
}
namespace flutter {
/// Produces raster snapshots of display lists, with backend specifics
/// (Skia vs. Impeller) chosen by the Make() factory based on Settings.
class SnapshotController {
 public:
  /// Supplies the graphics resources the controller needs. Implemented by
  /// the owner (e.g. the rasterizer); the controller only holds a
  /// reference and never owns the delegate.
  class Delegate {
   public:
    virtual ~Delegate() = default;
    /// The current onscreen surface, if any.
    virtual const std::unique_ptr<Surface>& GetSurface() const = 0;
    /// The Impeller context, when Impeller is the active backend.
    virtual std::shared_ptr<impeller::AiksContext> GetAiksContext() const = 0;
    /// Producer used to obtain offscreen surfaces for snapshotting.
    virtual const std::unique_ptr<SnapshotSurfaceProducer>&
    GetSnapshotSurfaceProducer() const = 0;
    /// Switch consulted to decide whether GPU work is currently allowed.
    virtual std::shared_ptr<const fml::SyncSwitch> GetIsGpuDisabledSyncSwitch()
        const = 0;
  };
  /// Creates the backend-appropriate concrete controller.
  static std::unique_ptr<SnapshotController> Make(const Delegate& delegate,
                                                  const Settings& settings);
  virtual ~SnapshotController() = default;
  // Note that this image is not guaranteed to be UIThreadSafe and must
  // be converted to a DlImageGPU if it is to be handed back to the UI
  // thread.
  virtual sk_sp<DlImage> MakeRasterSnapshot(sk_sp<DisplayList> display_list,
                                            SkISize size) = 0;
  /// Converts a (possibly GPU-backed) image into a CPU raster image.
  virtual sk_sp<SkImage> ConvertToRasterImage(sk_sp<SkImage> image) = 0;
 protected:
  explicit SnapshotController(const Delegate& delegate);
  const Delegate& GetDelegate() { return delegate_; }
 private:
  // Non-owning; must outlive this controller.
  const Delegate& delegate_;
  FML_DISALLOW_COPY_AND_ASSIGN(SnapshotController);
};
} // namespace flutter
#endif // FLUTTER_SHELL_COMMON_SNAPSHOT_CONTROLLER_H_
| engine/shell/common/snapshot_controller.h/0 | {
"file_path": "engine/shell/common/snapshot_controller.h",
"repo_id": "engine",
"token_count": 730
} | 322 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_COMMON_VSYNC_WAITER_H_
#define FLUTTER_SHELL_COMMON_VSYNC_WAITER_H_
#include <functional>
#include <memory>
#include <mutex>
#include <unordered_map>
#include "flutter/common/task_runners.h"
#include "flutter/flow/frame_timings.h"
#include "flutter/fml/time/time_point.h"
namespace flutter {
/// Abstract Base Class that represents a platform specific mechanism for
/// getting callbacks when a vsync event happens.
///
/// @see VsyncWaiterAndroid
/// @see VsyncWaiterEmbedder
class VsyncWaiter : public std::enable_shared_from_this<VsyncWaiter> {
 public:
  /// Invoked on vsync with a recorder capturing the frame's timing info.
  using Callback = std::function<void(std::unique_ptr<FrameTimingsRecorder>)>;
  virtual ~VsyncWaiter();
  /// Registers |callback| to be fired at the next vsync and arms the
  /// platform vsync mechanism. Only one primary callback is held at a time
  /// (guarded by |callback_mutex_|).
  void AsyncWaitForVsync(const Callback& callback);
  /// Add a secondary callback for key |id| for the next vsync.
  ///
  /// See also |PointerDataDispatcher::ScheduleSecondaryVsyncCallback| and
  /// |Animator::ScheduleMaybeClearTraceFlowIds|.
  void ScheduleSecondaryCallback(uintptr_t id, const fml::closure& callback);
 protected:
  // On some backends, the |FireCallback| needs to be made from a static C
  // method.
  friend class VsyncWaiterAndroid;
  friend class VsyncWaiterEmbedder;
  const TaskRunners task_runners_;
  explicit VsyncWaiter(const TaskRunners& task_runners);
  // There are two distinct situations where VsyncWaiter wishes to awaken at
  // the next vsync. Although the functionality can be the same, the intent is
  // different, therefore it makes sense to have a method for each intent.
  // The intent of AwaitVSync() is that the Animator wishes to produce a frame.
  // The underlying implementation can choose to be aware of this intent when
  // it comes to implementing backpressure and other scheduling invariants.
  //
  // Implementations are meant to override this method and arm their vsync
  // latches when in response to this invocation. On vsync, they are meant to
  // invoke the |FireCallback| method once (and only once) with the appropriate
  // arguments. This method should not block the current thread.
  virtual void AwaitVSync() = 0;
  // The intent of AwaitVSyncForSecondaryCallback() is simply to wake up at the
  // next vsync.
  //
  // Because there is no association with frame scheduling, underlying
  // implementations do not need to worry about maintaining invariants or
  // backpressure. The default implementation is to simply follow the same logic
  // as AwaitVSync().
  virtual void AwaitVSyncForSecondaryCallback() { AwaitVSync(); }
  // Schedules the callback on the UI task runner. Needs to be invoked as close
  // to the `frame_start_time` as possible.
  void FireCallback(fml::TimePoint frame_start_time,
                    fml::TimePoint frame_target_time,
                    bool pause_secondary_tasks = true);
 private:
  // Guards |callback_| and |secondary_callbacks_|; waiters may be armed and
  // fired from different threads.
  std::mutex callback_mutex_;
  // The single pending primary (frame-producing) callback, if any.
  Callback callback_;
  // Secondary wake-up callbacks keyed by caller-chosen id.
  std::unordered_map<uintptr_t, fml::closure> secondary_callbacks_;
  void PauseDartEventLoopTasks();
  static void ResumeDartEventLoopTasks(fml::TaskQueueId ui_task_queue_id);
  FML_DISALLOW_COPY_AND_ASSIGN(VsyncWaiter);
};
} // namespace flutter
#endif // FLUTTER_SHELL_COMMON_VSYNC_WAITER_H_
| engine/shell/common/vsync_waiter.h/0 | {
"file_path": "engine/shell/common/vsync_waiter.h",
"repo_id": "engine",
"token_count": 1010
} | 323 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_DELEGATE_H_
#define FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_DELEGATE_H_
#include <stdint.h>
#include "flutter/fml/macros.h"
#include "third_party/skia/include/core/SkSize.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/gpu/ganesh/mtl/GrMtlTypes.h"
namespace flutter {
// expected to be id<MTLDevice>
typedef void* GPUMTLDeviceHandle;
// expected to be id<MTLCommandQueues>
typedef void* GPUMTLCommandQueueHandle;
// expected to be CAMetalLayer*
typedef void* GPUCAMetalLayerHandle;
// expected to be id<MTLTexture>
typedef const void* GPUMTLTextureHandle;
typedef void (*GPUMTLDestructionCallback)(void* /* destruction_context */);
// Describes a Metal texture exchanged with the embedder, plus the callback
// used to release it once the engine is done with it.
struct GPUMTLTextureInfo {
  // Embedder-assigned identifier for this texture.
  int64_t texture_id;
  // Expected to be an id<MTLTexture> (see GPUMTLTextureHandle above).
  GPUMTLTextureHandle texture;
  // Invoked with |destruction_context| when the texture may be released.
  GPUMTLDestructionCallback destruction_callback;
  void* destruction_context;
};
// Whether rendering targets an id<MTLTexture> or a CAMetalLayer.
enum class MTLRenderTargetType { kMTLTexture, kCAMetalLayer };
//------------------------------------------------------------------------------
/// @brief Interface implemented by all platform surfaces that can present
/// a metal backing store to the "screen". The GPU surface
/// abstraction (which abstracts the client rendering API) uses this
/// delegation pattern to tell the platform surface (which abstracts
/// how backing stores fulfilled by the selected client rendering
/// API end up on the "screen" on a particular platform) when the
/// rasterizer needs to allocate and present the software backing
/// store.
///
/// @see |IOSurfaceMetal| and |EmbedderSurfaceMetal|.
///
class GPUSurfaceMetalDelegate {
 public:
  //------------------------------------------------------------------------------
  /// @brief Construct a new GPUSurfaceMetalDelegate object with the specified
  /// render_target type.
  ///
  /// @see |MTLRenderTargetType|
  ///
  explicit GPUSurfaceMetalDelegate(MTLRenderTargetType render_target);
  virtual ~GPUSurfaceMetalDelegate();
  //------------------------------------------------------------------------------
  /// @brief Returns the handle to the CAMetalLayer to render to. This is only
  /// called when the specified render target type is `kCAMetalLayer`.
  ///
  virtual GPUCAMetalLayerHandle GetCAMetalLayer(
      const SkISize& frame_info) const = 0;
  //------------------------------------------------------------------------------
  /// @brief Presents the drawable to the "screen". The drawable is obtained
  /// from the CAMetalLayer that given by `GetCAMetalLayer` call. This is only
  /// called when the specified render target type in `kCAMetalLayer`.
  ///
  /// @see |GPUSurfaceMetalDelegate::GetCAMetalLayer|
  ///
  virtual bool PresentDrawable(GrMTLHandle drawable) const = 0;
  //------------------------------------------------------------------------------
  /// @brief Returns the handle to the MTLTexture to render to. This is only
  /// called when the specified render target type is `kMTLTexture`.
  ///
  virtual GPUMTLTextureInfo GetMTLTexture(const SkISize& frame_info) const = 0;
  //------------------------------------------------------------------------------
  /// @brief Presents the texture with `texture_id` to the "screen".
  /// `texture_id` corresponds to a texture that has been obtained by an earlier
  /// call to `GetMTLTexture`. This is only called when the specified render
  /// target type is `kMTLTexture`.
  ///
  /// @see |GPUSurfaceMetalDelegate::GetMTLTexture|
  ///
  virtual bool PresentTexture(GPUMTLTextureInfo texture) const = 0;
  //------------------------------------------------------------------------------
  /// @brief Whether to allow drawing to the surface when the GPU is disabled
  ///
  virtual bool AllowsDrawingWhenGpuDisabled() const;
  /// Returns the target type fixed at construction time.
  MTLRenderTargetType GetRenderTargetType();
 private:
  // Immutable after construction; selects which of the two API families
  // above (CAMetalLayer vs. MTLTexture) will be exercised.
  const MTLRenderTargetType render_target_type_;
};
} // namespace flutter
#endif // FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_DELEGATE_H_
| engine/shell/gpu/gpu_surface_metal_delegate.h/0 | {
"file_path": "engine/shell/gpu/gpu_surface_metal_delegate.h",
"repo_id": "engine",
"token_count": 1224
} | 324 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//flutter/shell/platform/config.gni")
# Aggregates the per-OS embedder implementations; exactly one branch is
# selected based on the current target platform.
group("platform") {
  if (is_mac || is_ios) {
    deps = [ "darwin" ]
  } else if (is_android) {
    deps = [ "android" ]
  } else if (is_linux) {
    deps = []
    # The Linux desktop embedding is optional and gated on the
    # enable_desktop_embeddings build argument.
    if (enable_desktop_embeddings) {
      deps += [ "linux" ]
    }
  } else if (is_win) {
    deps = []
    # Same gating for the Windows desktop embedding.
    if (enable_desktop_embeddings) {
      deps += [ "windows" ]
    }
  } else if (is_fuchsia) {
    import("//flutter/tools/fuchsia/gn-sdk/src/gn_configs.gni")
    deps = [ "fuchsia" ]
  } else {
    # Fail the gen step rather than silently building nothing.
    assert(false, "Unknown/Unsupported platform.")
  }
}
| engine/shell/platform/BUILD.gn/0 | {
"file_path": "engine/shell/platform/BUILD.gn",
"repo_id": "engine",
"token_count": 303
} | 325 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_EGL_SURFACE_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_EGL_SURFACE_H_
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <KHR/khrplatform.h>
#include <optional>
#include "flutter/fml/macros.h"
#include "flutter/fml/time/time_point.h"
#include "flutter/shell/platform/android/android_environment_gl.h"
#include "third_party/skia/include/core/SkRect.h"
namespace flutter {
//------------------------------------------------------------------------------
/// Holds an `EGLSurface` reference.
///
///
/// This can be used in conjunction with unique_ptr to provide better
/// guarantees about the lifespan of the `EGLSurface` object.
///
class AndroidEGLSurfaceDamage;
/// Result of calling MakeCurrent on AndroidEGLSurface.
enum class AndroidEGLSurfaceMakeCurrentStatus {
/// Success, the egl context for the surface was already current.
kSuccessAlreadyCurrent,
/// Success, the egl context for the surface made current.
kSuccessMadeCurrent,
/// Failed to make the egl context for the surface current.
kFailure,
};
// Logs the most recent EGL error for debugging (diagnostic only; does not
// clear or alter rendering state beyond what eglGetError itself does).
void LogLastEGLError();
class AndroidEGLSurface {
 public:
  // NOTE(review): presumably takes ownership of |surface| (released in the
  // destructor) — confirm against the .cc; |display| and |context| are
  // stored but not owned.
  AndroidEGLSurface(EGLSurface surface, EGLDisplay display, EGLContext context);
  ~AndroidEGLSurface();
  //----------------------------------------------------------------------------
  /// @return Whether the current `EGLSurface` reference is valid. That is,
  /// if
  /// the surface doesn't point to `EGL_NO_SURFACE`.
  ///
  bool IsValid() const;
  //----------------------------------------------------------------------------
  /// @brief Binds the EGLContext context to the current rendering thread
  /// and to the draw and read surface.
  ///
  /// @return Whether the surface was made current.
  ///
  AndroidEGLSurfaceMakeCurrentStatus MakeCurrent() const;
  //----------------------------------------------------------------------------
  ///
  /// @return Whether target surface supports partial repaint.
  ///
  bool SupportsPartialRepaint() const;
  //----------------------------------------------------------------------------
  /// @brief This is the minimal area that needs to be repainted to get
  /// correct result.
  ///
  /// With double or triple buffering this buffer content may lag behind
  /// current front buffer and the rect accounts for accumulated damage.
  ///
  /// @return The area of current surface where it is behind front buffer.
  ///
  std::optional<SkIRect> InitialDamage();
  //----------------------------------------------------------------------------
  /// @brief Sets the damage region for current surface. Corresponds to
  // eglSetDamageRegionKHR
  void SetDamageRegion(const std::optional<SkIRect>& buffer_damage);
  //----------------------------------------------------------------------------
  /// @brief Sets the presentation time for the current surface. This
  // corresponds to calling eglPresentationTimeAndroid when
  // available.
  bool SetPresentationTime(const fml::TimePoint& presentation_time);
  //----------------------------------------------------------------------------
  /// @brief This only applies to on-screen surfaces such as those created
  /// by `AndroidContextGL::CreateOnscreenSurface`.
  ///
  /// @return Whether the EGL surface color buffer was swapped.
  ///
  bool SwapBuffers(const std::optional<SkIRect>& surface_damage);
  //----------------------------------------------------------------------------
  /// @return The size of an `EGLSurface`.
  ///
  SkISize GetSize() const;
 private:
  /// Returns true if the EGLContext held is current for the display and surface
  bool IsContextCurrent() const;
  const EGLSurface surface_;
  const EGLDisplay display_;
  const EGLContext context_;
  // Tracks accumulated damage across swap-chain buffers for partial repaint.
  std::unique_ptr<AndroidEGLSurfaceDamage> damage_;
  // Resolved eglPresentationTimeANDROID entry point; remains nullptr when
  // the extension is unavailable (see SetPresentationTime above).
  PFNEGLPRESENTATIONTIMEANDROIDPROC presentation_time_proc_ = nullptr;
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_EGL_SURFACE_H_
| engine/shell/platform/android/android_egl_surface.h/0 | {
"file_path": "engine/shell/platform/android/android_egl_surface.h",
"repo_id": "engine",
"token_count": 1209
} | 326 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_SURFACE_VULKAN_IMPELLER_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_SURFACE_VULKAN_IMPELLER_H_
#include "flutter/fml/concurrent_message_loop.h"
#include "flutter/fml/macros.h"
#include "flutter/impeller/renderer/backend/vulkan/surface_context_vk.h"
#include "flutter/shell/platform/android/android_context_vulkan_impeller.h"
#include "flutter/shell/platform/android/surface/android_native_window.h"
#include "flutter/shell/platform/android/surface/android_surface.h"
namespace flutter {
/// An AndroidSurface backed by Impeller's Vulkan surface context; bridges
/// an Android native window to Vulkan-rendered GPU surfaces.
class AndroidSurfaceVulkanImpeller : public AndroidSurface {
 public:
  explicit AndroidSurfaceVulkanImpeller(
      const std::shared_ptr<AndroidContextVulkanImpeller>& android_context);
  ~AndroidSurfaceVulkanImpeller() override;
  // |AndroidSurface|
  bool IsValid() const override;
  // |AndroidSurface|
  std::unique_ptr<Surface> CreateGPUSurface(
      GrDirectContext* gr_context) override;
  // |AndroidSurface|
  void TeardownOnScreenContext() override;
  // |AndroidSurface|
  bool OnScreenSurfaceResize(const SkISize& size) override;
  // |AndroidSurface|
  bool ResourceContextMakeCurrent() override;
  // |AndroidSurface|
  bool ResourceContextClearCurrent() override;
  // |AndroidSurface|
  std::shared_ptr<impeller::Context> GetImpellerContext() override;
  // |AndroidSurface|
  bool SetNativeWindow(fml::RefPtr<AndroidNativeWindow> window) override;
 private:
  // Impeller's per-surface Vulkan context (swapchain management etc.).
  std::shared_ptr<impeller::SurfaceContextVK> surface_context_vk_;
  // The Android window currently bound via SetNativeWindow, if any.
  fml::RefPtr<AndroidNativeWindow> native_window_;
  // Set during construction; reported via IsValid().
  bool is_valid_ = false;
  FML_DISALLOW_COPY_AND_ASSIGN(AndroidSurfaceVulkanImpeller);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_SURFACE_VULKAN_IMPELLER_H_
| engine/shell/platform/android/android_surface_vulkan_impeller.h/0 | {
"file_path": "engine/shell/platform/android/android_surface_vulkan_impeller.h",
"repo_id": "engine",
"token_count": 662
} | 327 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_FLUTTER_MAIN_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_FLUTTER_MAIN_H_
#include <jni.h>
#include "flutter/common/settings.h"
#include "flutter/fml/macros.h"
#include "flutter/runtime/dart_service_isolate.h"
namespace flutter {
/// Process-wide entry point for the Android embedding: holds the immutable
/// Settings parsed at startup and registers the JNI Init hook.
class FlutterMain {
 public:
  ~FlutterMain();
  /// Registers this class's native methods with JNI. Returns false on
  /// failure.
  static bool Register(JNIEnv* env);
  /// Returns the process-wide instance created by Init().
  static FlutterMain& Get();
  const flutter::Settings& GetSettings() const;
  /// Chooses the rendering API (e.g. GL vs. Vulkan) for the given settings.
  static AndroidRenderingAPI SelectedRenderingAPI(
      const flutter::Settings& settings);
 private:
  const flutter::Settings settings_;
  // Handle for the registered VM-service-URI callback; 0 when unset.
  DartServiceIsolate::CallbackHandle vm_service_uri_callback_ = 0;
  explicit FlutterMain(const flutter::Settings& settings);
  // JNI entry point invoked from FlutterMain.java with the app's paths and
  // startup arguments.
  static void Init(JNIEnv* env,
                   jclass clazz,
                   jobject context,
                   jobjectArray jargs,
                   jstring kernelPath,
                   jstring appStoragePath,
                   jstring engineCachesPath,
                   jlong initTimeMillis);
  void SetupDartVMServiceUriCallback(JNIEnv* env);
  FML_DISALLOW_COPY_AND_ASSIGN(FlutterMain);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_FLUTTER_MAIN_H_
| engine/shell/platform/android/flutter_main.h/0 | {
"file_path": "engine/shell/platform/android/flutter_main.h",
"repo_id": "engine",
"token_count": 558
} | 328 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.app;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import androidx.annotation.NonNull;
import io.flutter.app.FlutterActivityDelegate.ViewFactory;
import io.flutter.plugin.common.PluginRegistry;
import io.flutter.view.FlutterNativeView;
import io.flutter.view.FlutterView;
/**
* Deprecated base class for activities that use Flutter.
*
* @deprecated {@link io.flutter.embedding.android.FlutterActivity} is the new API that now replaces
* this class. See https://flutter.dev/go/android-project-migration for more migration details.
*/
@Deprecated
public class FlutterActivity extends Activity
    implements FlutterView.Provider, PluginRegistry, ViewFactory {
  private static final String TAG = "FlutterActivity";
  // Single delegate that implements all three forwarded roles below.
  private final FlutterActivityDelegate delegate = new FlutterActivityDelegate(this, this);
  // These aliases ensure that the methods we forward to the delegate adhere
  // to relevant interfaces versus just existing in FlutterActivityDelegate.
  private final FlutterActivityEvents eventDelegate = delegate;
  private final FlutterView.Provider viewProvider = delegate;
  private final PluginRegistry pluginRegistry = delegate;
  /**
   * Returns the Flutter view used by this activity; will be null before {@link #onCreate(Bundle)}
   * is called.
   */
  @Override
  public FlutterView getFlutterView() {
    return viewProvider.getFlutterView();
  }
  /**
   * Hook for subclasses to customize the creation of the {@code FlutterView}.
   *
   * <p>The default implementation returns {@code null}, which will cause the activity to use a
   * newly instantiated full-screen view.
   */
  @Override
  public FlutterView createFlutterView(Context context) {
    return null;
  }
  /**
   * Hook for subclasses to customize the creation of the {@code FlutterNativeView}.
   *
   * <p>The default implementation returns {@code null}, which will cause the activity to use a
   * newly instantiated native view object.
   */
  @Override
  public FlutterNativeView createFlutterNativeView() {
    return null;
  }
  // By default the native view is not retained across activity recreation.
  @Override
  public boolean retainFlutterNativeView() {
    return false;
  }
  // PluginRegistry methods are pure forwards to the delegate.
  @Override
  public final boolean hasPlugin(String key) {
    return pluginRegistry.hasPlugin(key);
  }
  @Override
  public final <T> T valuePublishedByPlugin(String pluginKey) {
    return pluginRegistry.valuePublishedByPlugin(pluginKey);
  }
  @Override
  public final Registrar registrarFor(String pluginKey) {
    return pluginRegistry.registrarFor(pluginKey);
  }
  // Lifecycle callbacks forward to the delegate. Note the ordering: setup
  // callbacks (onCreate/onStart/onResume) notify super first, while
  // teardown callbacks (onDestroy/onStop) notify the delegate first so it
  // can clean up before the Activity is torn down.
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    eventDelegate.onCreate(savedInstanceState);
  }
  @Override
  protected void onStart() {
    super.onStart();
    eventDelegate.onStart();
  }
  @Override
  protected void onResume() {
    super.onResume();
    eventDelegate.onResume();
  }
  @Override
  protected void onDestroy() {
    eventDelegate.onDestroy();
    super.onDestroy();
  }
  // The delegate gets first refusal on back presses; only unhandled ones
  // fall through to the default Activity behavior.
  @Override
  public void onBackPressed() {
    if (!eventDelegate.onBackPressed()) {
      super.onBackPressed();
    }
  }
  @Override
  protected void onStop() {
    eventDelegate.onStop();
    super.onStop();
  }
  @Override
  protected void onPause() {
    super.onPause();
    eventDelegate.onPause();
  }
  @Override
  protected void onPostResume() {
    super.onPostResume();
    eventDelegate.onPostResume();
  }
  // @Override - added in API level 23
  public void onRequestPermissionsResult(
      int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    eventDelegate.onRequestPermissionsResult(requestCode, permissions, grantResults);
  }
  // As with back presses, activity results not consumed by the delegate
  // are passed to the default implementation.
  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (!eventDelegate.onActivityResult(requestCode, resultCode, data)) {
      super.onActivityResult(requestCode, resultCode, data);
    }
  }
  @Override
  protected void onNewIntent(Intent intent) {
    eventDelegate.onNewIntent(intent);
  }
  @Override
  public void onUserLeaveHint() {
    eventDelegate.onUserLeaveHint();
  }
  @Override
  public void onWindowFocusChanged(boolean hasFocus) {
    super.onWindowFocusChanged(hasFocus);
    eventDelegate.onWindowFocusChanged(hasFocus);
  }
  @Override
  public void onTrimMemory(int level) {
    eventDelegate.onTrimMemory(level);
  }
  @Override
  public void onLowMemory() {
    eventDelegate.onLowMemory();
  }
  @Override
  public void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    eventDelegate.onConfigurationChanged(newConfig);
  }
}
| engine/shell/platform/android/io/flutter/app/FlutterActivity.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/app/FlutterActivity.java",
"repo_id": "engine",
"token_count": 1489
} | 329 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.android;
import static io.flutter.Build.API_LEVELS;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ColorSpace;
import android.graphics.PixelFormat;
import android.hardware.HardwareBuffer;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.util.AttributeSet;
import android.view.Surface;
import android.view.View;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.engine.renderer.FlutterRenderer;
import io.flutter.embedding.engine.renderer.RenderSurface;
import java.nio.ByteBuffer;
import java.util.Locale;
/**
* Paints a Flutter UI provided by an {@link android.media.ImageReader} onto a {@link
* android.graphics.Canvas}.
*
* <p>A {@code FlutterImageView} is intended for situations where a developer needs to render a
* Flutter UI, but also needs to render an interactive {@link
* io.flutter.plugin.platform.PlatformView}.
*
* <p>This {@code View} takes an {@link android.media.ImageReader} that provides the Flutter UI in
* an {@link android.media.Image} and renders it to the {@link android.graphics.Canvas} in {@code
* onDraw}.
*/
public class FlutterImageView extends View implements RenderSurface {
private static final String TAG = "FlutterImageView";
@NonNull private ImageReader imageReader;
@Nullable private Image currentImage;
@Nullable private Bitmap currentBitmap;
@Nullable private FlutterRenderer flutterRenderer;
public ImageReader getImageReader() {
return imageReader;
}
public enum SurfaceKind {
/** Displays the background canvas. */
background,
/** Displays the overlay surface canvas. */
overlay,
}
/** The kind of surface. */
private SurfaceKind kind;
/** Whether the view is attached to the Flutter render. */
private boolean isAttachedToFlutterRenderer = false;
/**
* Constructs a {@code FlutterImageView} with an {@link android.media.ImageReader} that provides
* the Flutter UI.
*/
public FlutterImageView(@NonNull Context context, int width, int height, SurfaceKind kind) {
this(context, createImageReader(width, height), kind);
}
public FlutterImageView(@NonNull Context context) {
this(context, 1, 1, SurfaceKind.background);
}
public FlutterImageView(@NonNull Context context, @NonNull AttributeSet attrs) {
this(context, 1, 1, SurfaceKind.background);
}
@VisibleForTesting
/*package*/ FlutterImageView(
@NonNull Context context, @NonNull ImageReader imageReader, SurfaceKind kind) {
super(context, null);
this.imageReader = imageReader;
this.kind = kind;
init();
}
private void init() {
setAlpha(0.0f);
}
private static void logW(String format, Object... args) {
Log.w(TAG, String.format(Locale.US, format, args));
}
@SuppressLint("WrongConstant") // RGBA_8888 is a valid constant.
@NonNull
private static ImageReader createImageReader(int width, int height) {
if (width <= 0) {
logW("ImageReader width must be greater than 0, but given width=%d, set width=1", width);
width = 1;
}
if (height <= 0) {
logW("ImageReader height must be greater than 0, but given height=%d, set height=1", height);
height = 1;
}
if (android.os.Build.VERSION.SDK_INT >= API_LEVELS.API_29) {
return ImageReader.newInstance(
width,
height,
PixelFormat.RGBA_8888,
3,
HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT);
} else {
return ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 3);
}
}
@NonNull
public Surface getSurface() {
return imageReader.getSurface();
}
@Nullable
@Override
public FlutterRenderer getAttachedRenderer() {
return flutterRenderer;
}
/**
* Invoked by the owner of this {@code FlutterImageView} when it wants to begin rendering a
* Flutter UI to this {@code FlutterImageView}.
*/
@Override
public void attachToRenderer(@NonNull FlutterRenderer flutterRenderer) {
switch (kind) {
case background:
flutterRenderer.swapSurface(imageReader.getSurface());
break;
case overlay:
// Do nothing since the attachment is done by the handler of
// `FlutterJNI#createOverlaySurface()` in the native side.
break;
}
setAlpha(1.0f);
this.flutterRenderer = flutterRenderer;
isAttachedToFlutterRenderer = true;
}
/**
* Invoked by the owner of this {@code FlutterImageView} when it no longer wants to render a
* Flutter UI to this {@code FlutterImageView}.
*/
public void detachFromRenderer() {
if (!isAttachedToFlutterRenderer) {
return;
}
setAlpha(0.0f);
// Drop the latest image as it shouldn't render this image if this view is
// attached to the renderer again.
acquireLatestImage();
// Clear drawings.
currentBitmap = null;
// Close and clear the current image if any.
closeCurrentImage();
invalidate();
isAttachedToFlutterRenderer = false;
}
public void pause() {
// Not supported.
}
public void resume() {
// Not supported.
}
/**
* Acquires the next image to be drawn to the {@link android.graphics.Canvas}. Returns true if
* there's an image available in the queue.
*/
public boolean acquireLatestImage() {
if (!isAttachedToFlutterRenderer) {
return false;
}
// 1. `acquireLatestImage()` may return null if no new image is available.
// 2. There's no guarantee that `onDraw()` is called after `invalidate()`.
// For example, the device may not produce new frames if it's in sleep mode
// or some special Android devices so the calls to `invalidate()` queued up
// until the device produces a new frame.
// 3. While the engine will also stop producing frames, there is a race condition.
final Image newImage = imageReader.acquireLatestImage();
if (newImage != null) {
// Only close current image after acquiring valid new image
closeCurrentImage();
currentImage = newImage;
invalidate();
}
return newImage != null;
}
/** Creates a new image reader with the provided size. */
public void resizeIfNeeded(int width, int height) {
if (flutterRenderer == null) {
return;
}
if (width == imageReader.getWidth() && height == imageReader.getHeight()) {
return;
}
// Close resources.
closeCurrentImage();
// Close the current image reader, then create a new one with the new size.
// Image readers cannot be resized once created.
closeImageReader();
imageReader = createImageReader(width, height);
}
/**
* Closes the image reader associated with the current {@code FlutterImageView}.
*
* <p>Once the image reader is closed, calling {@code acquireLatestImage} will result in an {@code
* IllegalStateException}.
*/
public void closeImageReader() {
imageReader.close();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (currentImage != null) {
updateCurrentBitmap();
}
if (currentBitmap != null) {
canvas.drawBitmap(currentBitmap, 0, 0, null);
}
}
private void closeCurrentImage() {
// Close and clear the current image if any.
if (currentImage != null) {
currentImage.close();
currentImage = null;
}
}
  /**
   * Converts {@code currentImage} into {@code currentBitmap}.
   *
   * <p>On API 29+ the image's hardware buffer is wrapped directly (zero-copy); on older API levels
   * the pixels are copied out of the image's single plane into a software bitmap.
   */
  @TargetApi(API_LEVELS.API_29)
  private void updateCurrentBitmap() {
    if (android.os.Build.VERSION.SDK_INT >= API_LEVELS.API_29) {
      final HardwareBuffer buffer = currentImage.getHardwareBuffer();
      currentBitmap = Bitmap.wrapHardwareBuffer(buffer, ColorSpace.get(ColorSpace.Named.SRGB));
      // The bitmap keeps its own reference to the underlying buffer; release ours.
      buffer.close();
    } else {
      final Plane[] imagePlanes = currentImage.getPlanes();
      // Only single-plane images are supported here; bail out otherwise and keep the
      // previous bitmap (assumes an RGBA-style format — TODO confirm against the reader config).
      if (imagePlanes.length != 1) {
        return;
      }
      final Plane imagePlane = imagePlanes[0];
      // rowStride may include padding, so derive the effective pixel width from it.
      final int desiredWidth = imagePlane.getRowStride() / imagePlane.getPixelStride();
      final int desiredHeight = currentImage.getHeight();
      // Reuse the existing bitmap when the dimensions still match to avoid reallocation.
      if (currentBitmap == null
          || currentBitmap.getWidth() != desiredWidth
          || currentBitmap.getHeight() != desiredHeight) {
        currentBitmap =
            Bitmap.createBitmap(
                desiredWidth, desiredHeight, android.graphics.Bitmap.Config.ARGB_8888);
      }
      ByteBuffer buffer = imagePlane.getBuffer();
      buffer.rewind();
      currentBitmap.copyPixelsFromBuffer(buffer);
    }
  }
  @Override
  protected void onSizeChanged(int width, int height, int oldWidth, int oldHeight) {
    // No-op when the backing image reader already matches the new view size.
    if (width == imageReader.getWidth() && height == imageReader.getHeight()) {
      return;
    }
    // `SurfaceKind.overlay` isn't resized. Instead, the `FlutterImageView` instance
    // is destroyed. As a result, an instance with the new size is created by the surface
    // pool in the native side.
    if (kind == SurfaceKind.background && isAttachedToFlutterRenderer) {
      resizeIfNeeded(width, height);
      // Bind native window to the new surface, and create a new onscreen surface
      // with the new size in the native side.
      flutterRenderer.swapSurface(imageReader.getSurface());
    }
  }
}
| engine/shell/platform/android/io/flutter/embedding/android/FlutterImageView.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/android/FlutterImageView.java",
"repo_id": "engine",
"token_count": 3302
} | 330 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine;
import android.app.Activity;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.ContentProvider;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.lifecycle.Lifecycle;
import io.flutter.Log;
import io.flutter.embedding.android.ExclusiveAppComponent;
import io.flutter.embedding.engine.loader.FlutterLoader;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.PluginRegistry;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityControlSurface;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.embedding.engine.plugins.broadcastreceiver.BroadcastReceiverAware;
import io.flutter.embedding.engine.plugins.broadcastreceiver.BroadcastReceiverControlSurface;
import io.flutter.embedding.engine.plugins.broadcastreceiver.BroadcastReceiverPluginBinding;
import io.flutter.embedding.engine.plugins.contentprovider.ContentProviderAware;
import io.flutter.embedding.engine.plugins.contentprovider.ContentProviderControlSurface;
import io.flutter.embedding.engine.plugins.contentprovider.ContentProviderPluginBinding;
import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference;
import io.flutter.embedding.engine.plugins.service.ServiceAware;
import io.flutter.embedding.engine.plugins.service.ServiceControlSurface;
import io.flutter.embedding.engine.plugins.service.ServicePluginBinding;
import io.flutter.util.TraceSection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* This class is owned by the {@link io.flutter.embedding.engine.FlutterEngine} and its role is to
 * manage its connections with Android App Components and Flutter plugins.
*
* <p>It enforces the {0|1}:1 relationship between activity and engine, and propagates the app
* component connection to the plugins.
*/
/* package */ class FlutterEngineConnectionRegistry
implements PluginRegistry,
ActivityControlSurface,
ServiceControlSurface,
BroadcastReceiverControlSurface,
ContentProviderControlSurface {
private static final String TAG = "FlutterEngineCxnRegstry";
// PluginRegistry
@NonNull
private final Map<Class<? extends FlutterPlugin>, FlutterPlugin> plugins = new HashMap<>();
// Standard FlutterPlugin
@NonNull private final FlutterEngine flutterEngine;
@NonNull private final FlutterPlugin.FlutterPluginBinding pluginBinding;
// ActivityAware
@NonNull
private final Map<Class<? extends FlutterPlugin>, ActivityAware> activityAwarePlugins =
new HashMap<>();
@Nullable private ExclusiveAppComponent<Activity> exclusiveActivity;
@Nullable private FlutterEngineActivityPluginBinding activityPluginBinding;
private boolean isWaitingForActivityReattachment = false;
// ServiceAware
@NonNull
private final Map<Class<? extends FlutterPlugin>, ServiceAware> serviceAwarePlugins =
new HashMap<>();
@Nullable private Service service;
@Nullable private FlutterEngineServicePluginBinding servicePluginBinding;
// BroadcastReceiver
@NonNull
private final Map<Class<? extends FlutterPlugin>, BroadcastReceiverAware>
broadcastReceiverAwarePlugins = new HashMap<>();
@Nullable private BroadcastReceiver broadcastReceiver;
@Nullable private FlutterEngineBroadcastReceiverPluginBinding broadcastReceiverPluginBinding;
// ContentProvider
@NonNull
private final Map<Class<? extends FlutterPlugin>, ContentProviderAware>
contentProviderAwarePlugins = new HashMap<>();
@Nullable private ContentProvider contentProvider;
@Nullable private FlutterEngineContentProviderPluginBinding contentProviderPluginBinding;
  /**
   * Creates a registry for the given {@code flutterEngine}, building the single {@link
   * FlutterPlugin.FlutterPluginBinding} that is handed to every plugin registered with this engine.
   */
  FlutterEngineConnectionRegistry(
      @NonNull Context appContext,
      @NonNull FlutterEngine flutterEngine,
      @NonNull FlutterLoader flutterLoader,
      @Nullable FlutterEngineGroup group) {
    this.flutterEngine = flutterEngine;
    pluginBinding =
        new FlutterPlugin.FlutterPluginBinding(
            appContext,
            flutterEngine,
            flutterEngine.getDartExecutor(),
            flutterEngine.getRenderer(),
            flutterEngine.getPlatformViewsController().getRegistry(),
            new DefaultFlutterAssets(flutterLoader),
            group);
  }
public void destroy() {
Log.v(TAG, "Destroying.");
// Detach from any Android component that we may currently be attached to, e.g., Activity,
// Service, BroadcastReceiver, ContentProvider. This must happen before removing all plugins so
// that the plugins have an opportunity to clean up references as a result of component
// detachment.
detachFromAppComponent();
// Remove all registered plugins.
removeAll();
}
  @Override
  public void add(@NonNull FlutterPlugin plugin) {
    try (TraceSection e =
        TraceSection.scoped(
            "FlutterEngineConnectionRegistry#add " + plugin.getClass().getSimpleName())) {
      // Registration is idempotent per plugin class; a duplicate is logged and ignored.
      if (has(plugin.getClass())) {
        Log.w(
            TAG,
            "Attempted to register plugin ("
                + plugin
                + ") but it was "
                + "already registered with this FlutterEngine ("
                + flutterEngine
                + ").");
        return;
      }
      Log.v(TAG, "Adding plugin: " + plugin);
      // Add the plugin to our generic set of plugins and notify the plugin
      // that it has been attached to an engine.
      plugins.put(plugin.getClass(), plugin);
      plugin.onAttachedToEngine(pluginBinding);
      // For ActivityAware plugins, add the plugin to our set of ActivityAware
      // plugins, and if this engine is currently attached to an Activity,
      // notify the ActivityAware plugin that it is now attached to an Activity.
      if (plugin instanceof ActivityAware) {
        ActivityAware activityAware = (ActivityAware) plugin;
        activityAwarePlugins.put(plugin.getClass(), activityAware);
        if (isAttachedToActivity()) {
          activityAware.onAttachedToActivity(activityPluginBinding);
        }
      }
      // For ServiceAware plugins, add the plugin to our set of ServiceAware
      // plugins, and if this engine is currently attached to a Service,
      // notify the ServiceAware plugin that it is now attached to a Service.
      if (plugin instanceof ServiceAware) {
        ServiceAware serviceAware = (ServiceAware) plugin;
        serviceAwarePlugins.put(plugin.getClass(), serviceAware);
        if (isAttachedToService()) {
          serviceAware.onAttachedToService(servicePluginBinding);
        }
      }
      // For BroadcastReceiverAware plugins, add the plugin to our set of BroadcastReceiverAware
      // plugins, and if this engine is currently attached to a BroadcastReceiver,
      // notify the BroadcastReceiverAware plugin that it is now attached to a BroadcastReceiver.
      if (plugin instanceof BroadcastReceiverAware) {
        BroadcastReceiverAware broadcastReceiverAware = (BroadcastReceiverAware) plugin;
        broadcastReceiverAwarePlugins.put(plugin.getClass(), broadcastReceiverAware);
        if (isAttachedToBroadcastReceiver()) {
          broadcastReceiverAware.onAttachedToBroadcastReceiver(broadcastReceiverPluginBinding);
        }
      }
      // For ContentProviderAware plugins, add the plugin to our set of ContentProviderAware
      // plugins, and if this engine is currently attached to a ContentProvider,
      // notify the ContentProviderAware plugin that it is now attached to a ContentProvider.
      if (plugin instanceof ContentProviderAware) {
        ContentProviderAware contentProviderAware = (ContentProviderAware) plugin;
        contentProviderAwarePlugins.put(plugin.getClass(), contentProviderAware);
        if (isAttachedToContentProvider()) {
          contentProviderAware.onAttachedToContentProvider(contentProviderPluginBinding);
        }
      }
    }
  }
  @Override
  public void add(@NonNull Set<FlutterPlugin> plugins) {
    // Registers each plugin individually; duplicates are ignored by add(FlutterPlugin).
    for (FlutterPlugin plugin : plugins) {
      add(plugin);
    }
  }
  /** Whether a plugin of the given class is currently registered with this engine. */
  @Override
  public boolean has(@NonNull Class<? extends FlutterPlugin> pluginClass) {
    return plugins.containsKey(pluginClass);
  }
  /** Returns the registered plugin of the given class, or null if none is registered. */
  @Override
  public FlutterPlugin get(@NonNull Class<? extends FlutterPlugin> pluginClass) {
    return plugins.get(pluginClass);
  }
  /**
   * Unregisters the plugin of the given class, first detaching it from any app component it is
   * aware of (Activity/Service/BroadcastReceiver/ContentProvider) and finally from the engine.
   *
   * <p>No-op when no plugin of the given class is registered.
   */
  @Override
  public void remove(@NonNull Class<? extends FlutterPlugin> pluginClass) {
    FlutterPlugin plugin = plugins.get(pluginClass);
    if (plugin == null) {
      return;
    }
    try (TraceSection e =
        TraceSection.scoped(
            "FlutterEngineConnectionRegistry#remove " + pluginClass.getSimpleName())) {
      // For ActivityAware plugins, notify the plugin that it is detached from
      // an Activity if an Activity is currently attached to this engine. Then
      // remove the plugin from our set of ActivityAware plugins.
      if (plugin instanceof ActivityAware) {
        if (isAttachedToActivity()) {
          ActivityAware activityAware = (ActivityAware) plugin;
          activityAware.onDetachedFromActivity();
        }
        activityAwarePlugins.remove(pluginClass);
      }
      // For ServiceAware plugins, notify the plugin that it is detached from
      // a Service if a Service is currently attached to this engine. Then
      // remove the plugin from our set of ServiceAware plugins.
      if (plugin instanceof ServiceAware) {
        if (isAttachedToService()) {
          ServiceAware serviceAware = (ServiceAware) plugin;
          serviceAware.onDetachedFromService();
        }
        serviceAwarePlugins.remove(pluginClass);
      }
      // For BroadcastReceiverAware plugins, notify the plugin that it is detached from
      // a BroadcastReceiver if a BroadcastReceiver is currently attached to this engine. Then
      // remove the plugin from our set of BroadcastReceiverAware plugins.
      if (plugin instanceof BroadcastReceiverAware) {
        if (isAttachedToBroadcastReceiver()) {
          BroadcastReceiverAware broadcastReceiverAware = (BroadcastReceiverAware) plugin;
          broadcastReceiverAware.onDetachedFromBroadcastReceiver();
        }
        broadcastReceiverAwarePlugins.remove(pluginClass);
      }
      // For ContentProviderAware plugins, notify the plugin that it is detached from
      // a ContentProvider if a ContentProvider is currently attached to this engine. Then
      // remove the plugin from our set of ContentProviderAware plugins.
      if (plugin instanceof ContentProviderAware) {
        if (isAttachedToContentProvider()) {
          ContentProviderAware contentProviderAware = (ContentProviderAware) plugin;
          contentProviderAware.onDetachedFromContentProvider();
        }
        contentProviderAwarePlugins.remove(pluginClass);
      }
      // Notify the plugin that is now detached from this engine. Then remove
      // it from our set of generic plugins.
      plugin.onDetachedFromEngine(pluginBinding);
      plugins.remove(pluginClass);
    }
  }
  /** Unregisters each of the given plugin classes. See {@link #remove(Class)}. */
  @Override
  public void remove(@NonNull Set<Class<? extends FlutterPlugin>> pluginClasses) {
    for (Class<? extends FlutterPlugin> pluginClass : pluginClasses) {
      remove(pluginClass);
    }
  }
  /** Unregisters every plugin currently registered with this engine. */
  @Override
  public void removeAll() {
    // We copy the keys to a new set so that we can mutate the set while using
    // the keys.
    remove(new HashSet<>(plugins.keySet()));
    // Defensive: remove(Set) should already have emptied the map.
    plugins.clear();
  }
  /**
   * Detaches from whichever Android app component this engine is currently attached to, if any.
   *
   * <p>The branches are mutually exclusive: the engine is attached to at most one component kind
   * at a time (each attach*() call first invokes this method).
   */
  private void detachFromAppComponent() {
    if (isAttachedToActivity()) {
      detachFromActivity();
    } else if (isAttachedToService()) {
      detachFromService();
    } else if (isAttachedToBroadcastReceiver()) {
      detachFromBroadcastReceiver();
    } else if (isAttachedToContentProvider()) {
      detachFromContentProvider();
    }
  }
  // -------- Start ActivityControlSurface -------
  /** Whether this engine is currently attached to an Activity (via an exclusive app component). */
  private boolean isAttachedToActivity() {
    return exclusiveActivity != null;
  }
  /** The currently attached Activity, or null when not attached. */
  private Activity attachedActivity() {
    return exclusiveActivity != null ? exclusiveActivity.getAppComponent() : null;
  }
  @Override
  public void attachToActivity(
      @NonNull ExclusiveAppComponent<Activity> exclusiveActivity, @NonNull Lifecycle lifecycle) {
    try (TraceSection e = TraceSection.scoped("FlutterEngineConnectionRegistry#attachToActivity")) {
      // Enforce the {0|1}:1 activity:engine relationship: evict any previous exclusive owner.
      if (this.exclusiveActivity != null) {
        this.exclusiveActivity.detachFromFlutterEngine();
      }
      // If we were already attached to an app component, detach from it.
      detachFromAppComponent();
      this.exclusiveActivity = exclusiveActivity;
      attachToActivityInternal(exclusiveActivity.getAppComponent(), lifecycle);
    }
  }
  private void attachToActivityInternal(@NonNull Activity activity, @NonNull Lifecycle lifecycle) {
    this.activityPluginBinding = new FlutterEngineActivityPluginBinding(activity, lifecycle);
    // Software rendering may be requested via an Intent extra on the launching Activity.
    final boolean useSoftwareRendering =
        activity.getIntent() != null
            ? activity
                .getIntent()
                .getBooleanExtra(FlutterShellArgs.ARG_KEY_ENABLE_SOFTWARE_RENDERING, false)
            : false;
    flutterEngine.getPlatformViewsController().setSoftwareRendering(useSoftwareRendering);
    // Activate the PlatformViewsController. This must happen before any plugins attempt
    // to use it, otherwise an error stack trace will appear that says there is no
    // flutter/platform_views channel.
    flutterEngine
        .getPlatformViewsController()
        .attach(activity, flutterEngine.getRenderer(), flutterEngine.getDartExecutor());
    // Notify all ActivityAware plugins that they are now attached to a new Activity.
    // A reattachment callback is used when the previous detach was for a config change.
    for (ActivityAware activityAware : activityAwarePlugins.values()) {
      if (isWaitingForActivityReattachment) {
        activityAware.onReattachedToActivityForConfigChanges(activityPluginBinding);
      } else {
        activityAware.onAttachedToActivity(activityPluginBinding);
      }
    }
    isWaitingForActivityReattachment = false;
  }
@Override
public void detachFromActivityForConfigChanges() {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped(
"FlutterEngineConnectionRegistry#detachFromActivityForConfigChanges")) {
isWaitingForActivityReattachment = true;
for (ActivityAware activityAware : activityAwarePlugins.values()) {
activityAware.onDetachedFromActivityForConfigChanges();
}
detachFromActivityInternal();
}
} else {
Log.e(TAG, "Attempted to detach plugins from an Activity when no Activity was attached.");
}
}
@Override
public void detachFromActivity() {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#detachFromActivity")) {
for (ActivityAware activityAware : activityAwarePlugins.values()) {
activityAware.onDetachedFromActivity();
}
detachFromActivityInternal();
}
} else {
Log.e(TAG, "Attempted to detach plugins from an Activity when no Activity was attached.");
}
}
private void detachFromActivityInternal() {
// Deactivate PlatformViewsController.
flutterEngine.getPlatformViewsController().detach();
exclusiveActivity = null;
activityPluginBinding = null;
}
@Override
public boolean onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResult) {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onRequestPermissionsResult")) {
return activityPluginBinding.onRequestPermissionsResult(
requestCode, permissions, grantResult);
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onRequestPermissionsResult, but no Activity"
+ " was attached.");
return false;
}
}
@Override
public boolean onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onActivityResult")) {
return activityPluginBinding.onActivityResult(requestCode, resultCode, data);
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onActivityResult, but no Activity was"
+ " attached.");
return false;
}
}
@Override
public void onNewIntent(@NonNull Intent intent) {
if (isAttachedToActivity()) {
try (TraceSection e = TraceSection.scoped("FlutterEngineConnectionRegistry#onNewIntent")) {
activityPluginBinding.onNewIntent(intent);
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onNewIntent, but no Activity was"
+ " attached.");
}
}
@Override
public void onUserLeaveHint() {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onUserLeaveHint")) {
activityPluginBinding.onUserLeaveHint();
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onUserLeaveHint, but no Activity was"
+ " attached.");
}
}
@Override
public void onSaveInstanceState(@NonNull Bundle bundle) {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onSaveInstanceState")) {
activityPluginBinding.onSaveInstanceState(bundle);
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onSaveInstanceState, but no Activity was"
+ " attached.");
}
}
@Override
public void onRestoreInstanceState(@Nullable Bundle bundle) {
if (isAttachedToActivity()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onRestoreInstanceState")) {
activityPluginBinding.onRestoreInstanceState(bundle);
}
} else {
Log.e(
TAG,
"Attempted to notify ActivityAware plugins of onRestoreInstanceState, but no Activity was"
+ " attached.");
}
}
  // ------- End ActivityControlSurface -----
  // ----- Start ServiceControlSurface ----
  /** Whether this engine is currently attached to a Service. */
  private boolean isAttachedToService() {
    return service != null;
  }
  @Override
  public void attachToService(
      @NonNull Service service, @Nullable Lifecycle lifecycle, boolean isForeground) {
    try (TraceSection e = TraceSection.scoped("FlutterEngineConnectionRegistry#attachToService")) {
      // If we were already attached to an Android component, detach from it.
      detachFromAppComponent();
      this.service = service;
      this.servicePluginBinding = new FlutterEngineServicePluginBinding(service, lifecycle);
      // Notify all ServiceAware plugins that they are now attached to a new Service.
      for (ServiceAware serviceAware : serviceAwarePlugins.values()) {
        serviceAware.onAttachedToService(servicePluginBinding);
      }
    }
  }
  @Override
  public void detachFromService() {
    if (isAttachedToService()) {
      try (TraceSection e =
          TraceSection.scoped("FlutterEngineConnectionRegistry#detachFromService")) {
        // Notify all ServiceAware plugins that they are no longer attached to a Service.
        for (ServiceAware serviceAware : serviceAwarePlugins.values()) {
          serviceAware.onDetachedFromService();
        }
        // Clear attachment state so isAttachedToService() reports false afterwards.
        service = null;
        servicePluginBinding = null;
      }
    } else {
      Log.e(TAG, "Attempted to detach plugins from a Service when no Service was attached.");
    }
  }
@Override
public void onMoveToForeground() {
if (isAttachedToService()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onMoveToForeground")) {
servicePluginBinding.onMoveToForeground();
}
}
}
@Override
public void onMoveToBackground() {
if (isAttachedToService()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#onMoveToBackground")) {
servicePluginBinding.onMoveToBackground();
}
}
}
  // ----- End ServiceControlSurface ---
  // ----- Start BroadcastReceiverControlSurface ---
  /** Whether this engine is currently attached to a BroadcastReceiver. */
  private boolean isAttachedToBroadcastReceiver() {
    return broadcastReceiver != null;
  }
  @Override
  public void attachToBroadcastReceiver(
      @NonNull BroadcastReceiver broadcastReceiver, @NonNull Lifecycle lifecycle) {
    try (TraceSection e =
        TraceSection.scoped("FlutterEngineConnectionRegistry#attachToBroadcastReceiver")) {
      // If we were already attached to an Android component, detach from it.
      detachFromAppComponent();
      this.broadcastReceiver = broadcastReceiver;
      this.broadcastReceiverPluginBinding =
          new FlutterEngineBroadcastReceiverPluginBinding(broadcastReceiver);
      // TODO(mattcarroll): resolve possibility of different lifecycles between this and engine
      // attachment
      // Notify all BroadcastReceiverAware plugins that they are now attached to a new
      // BroadcastReceiver.
      for (BroadcastReceiverAware broadcastReceiverAware : broadcastReceiverAwarePlugins.values()) {
        broadcastReceiverAware.onAttachedToBroadcastReceiver(broadcastReceiverPluginBinding);
      }
    }
  }
@Override
public void detachFromBroadcastReceiver() {
if (isAttachedToBroadcastReceiver()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#detachFromBroadcastReceiver")) {
// Notify all BroadcastReceiverAware plugins that they are no longer attached to a
// BroadcastReceiver.
for (BroadcastReceiverAware broadcastReceiverAware :
broadcastReceiverAwarePlugins.values()) {
broadcastReceiverAware.onDetachedFromBroadcastReceiver();
}
}
} else {
Log.e(
TAG,
"Attempted to detach plugins from a BroadcastReceiver when no BroadcastReceiver was"
+ " attached.");
}
}
  // ----- End BroadcastReceiverControlSurface ----
  // ----- Start ContentProviderControlSurface ----
  /** Whether this engine is currently attached to a ContentProvider. */
  private boolean isAttachedToContentProvider() {
    return contentProvider != null;
  }
  @Override
  public void attachToContentProvider(
      @NonNull ContentProvider contentProvider, @NonNull Lifecycle lifecycle) {
    try (TraceSection e =
        TraceSection.scoped("FlutterEngineConnectionRegistry#attachToContentProvider")) {
      // If we were already attached to an Android component, detach from it.
      detachFromAppComponent();
      this.contentProvider = contentProvider;
      this.contentProviderPluginBinding =
          new FlutterEngineContentProviderPluginBinding(contentProvider);
      // TODO(mattcarroll): resolve possibility of different lifecycles between this and engine
      // attachment
      // Notify all ContentProviderAware plugins that they are now attached to a new
      // ContentProvider.
      for (ContentProviderAware contentProviderAware : contentProviderAwarePlugins.values()) {
        contentProviderAware.onAttachedToContentProvider(contentProviderPluginBinding);
      }
    }
  }
@Override
public void detachFromContentProvider() {
if (isAttachedToContentProvider()) {
try (TraceSection e =
TraceSection.scoped("FlutterEngineConnectionRegistry#detachFromContentProvider")) {
// Notify all ContentProviderAware plugins that they are no longer attached to a
// ContentProvider.
for (ContentProviderAware contentProviderAware : contentProviderAwarePlugins.values()) {
contentProviderAware.onDetachedFromContentProvider();
}
}
} else {
Log.e(
TAG,
"Attempted to detach plugins from a ContentProvider when no ContentProvider was"
+ " attached.");
}
}
  // ----- End ContentProviderControlSurface -----
  /**
   * Default {@link FlutterPlugin.FlutterAssets} implementation that delegates asset-path lookups
   * to the {@link FlutterLoader}.
   */
  private static class DefaultFlutterAssets implements FlutterPlugin.FlutterAssets {
    final FlutterLoader flutterLoader;
    private DefaultFlutterAssets(@NonNull FlutterLoader flutterLoader) {
      this.flutterLoader = flutterLoader;
    }
    public String getAssetFilePathByName(@NonNull String assetFileName) {
      return flutterLoader.getLookupKeyForAsset(assetFileName);
    }
    public String getAssetFilePathByName(
        @NonNull String assetFileName, @NonNull String packageName) {
      return flutterLoader.getLookupKeyForAsset(assetFileName, packageName);
    }
    public String getAssetFilePathBySubpath(@NonNull String assetSubpath) {
      return flutterLoader.getLookupKeyForAsset(assetSubpath);
    }
    public String getAssetFilePathBySubpath(
        @NonNull String assetSubpath, @NonNull String packageName) {
      return flutterLoader.getLookupKeyForAsset(assetSubpath, packageName);
    }
  }
private static class FlutterEngineActivityPluginBinding implements ActivityPluginBinding {
@NonNull private final Activity activity;
@NonNull private final HiddenLifecycleReference hiddenLifecycleReference;
@NonNull
private final Set<io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener>
onRequestPermissionsResultListeners = new HashSet<>();
@NonNull
private final Set<io.flutter.plugin.common.PluginRegistry.ActivityResultListener>
onActivityResultListeners = new HashSet<>();
@NonNull
private final Set<io.flutter.plugin.common.PluginRegistry.NewIntentListener>
onNewIntentListeners = new HashSet<>();
@NonNull
private final Set<io.flutter.plugin.common.PluginRegistry.UserLeaveHintListener>
onUserLeaveHintListeners = new HashSet<>();
@NonNull
private final Set<io.flutter.plugin.common.PluginRegistry.WindowFocusChangedListener>
onWindowFocusChangedListeners = new HashSet<>();
@NonNull
private final Set<OnSaveInstanceStateListener> onSaveInstanceStateListeners = new HashSet<>();
public FlutterEngineActivityPluginBinding(
@NonNull Activity activity, @NonNull Lifecycle lifecycle) {
this.activity = activity;
this.hiddenLifecycleReference = new HiddenLifecycleReference(lifecycle);
}
@Override
@NonNull
public Activity getActivity() {
return activity;
}
@NonNull
@Override
public Object getLifecycle() {
return hiddenLifecycleReference;
}
@Override
public void addRequestPermissionsResultListener(
@NonNull
io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener listener) {
onRequestPermissionsResultListeners.add(listener);
}
@Override
public void removeRequestPermissionsResultListener(
@NonNull
io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener listener) {
onRequestPermissionsResultListeners.remove(listener);
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} has its {@code
* onRequestPermissionsResult(...)} method invoked.
*/
boolean onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResult) {
boolean didConsumeResult = false;
for (io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener listener :
onRequestPermissionsResultListeners) {
didConsumeResult =
listener.onRequestPermissionsResult(requestCode, permissions, grantResult)
|| didConsumeResult;
}
return didConsumeResult;
}
@Override
public void addActivityResultListener(
@NonNull io.flutter.plugin.common.PluginRegistry.ActivityResultListener listener) {
onActivityResultListeners.add(listener);
}
@Override
public void removeActivityResultListener(
@NonNull io.flutter.plugin.common.PluginRegistry.ActivityResultListener listener) {
onActivityResultListeners.remove(listener);
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} has its {@code
* onActivityResult(...)} method invoked.
*/
boolean onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
boolean didConsumeResult = false;
for (io.flutter.plugin.common.PluginRegistry.ActivityResultListener listener :
new HashSet<>(onActivityResultListeners)) {
didConsumeResult =
listener.onActivityResult(requestCode, resultCode, data) || didConsumeResult;
}
return didConsumeResult;
}
@Override
public void addOnNewIntentListener(
@NonNull io.flutter.plugin.common.PluginRegistry.NewIntentListener listener) {
onNewIntentListeners.add(listener);
}
@Override
public void removeOnNewIntentListener(
@NonNull io.flutter.plugin.common.PluginRegistry.NewIntentListener listener) {
onNewIntentListeners.remove(listener);
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} has its {@code
* onNewIntent(...)} method invoked.
*/
void onNewIntent(@Nullable Intent intent) {
for (io.flutter.plugin.common.PluginRegistry.NewIntentListener listener :
onNewIntentListeners) {
listener.onNewIntent(intent);
}
}
@Override
public void addOnUserLeaveHintListener(
@NonNull io.flutter.plugin.common.PluginRegistry.UserLeaveHintListener listener) {
onUserLeaveHintListeners.add(listener);
}
@Override
public void removeOnUserLeaveHintListener(
@NonNull io.flutter.plugin.common.PluginRegistry.UserLeaveHintListener listener) {
onUserLeaveHintListeners.remove(listener);
}
@Override
public void addOnWindowFocusChangedListener(
@NonNull io.flutter.plugin.common.PluginRegistry.WindowFocusChangedListener listener) {
onWindowFocusChangedListeners.add(listener);
}
@Override
public void removeOnWindowFocusChangedListener(
@NonNull io.flutter.plugin.common.PluginRegistry.WindowFocusChangedListener listener) {
onWindowFocusChangedListeners.remove(listener);
}
void onWindowFocusChanged(boolean hasFocus) {
for (io.flutter.plugin.common.PluginRegistry.WindowFocusChangedListener listener :
onWindowFocusChangedListeners) {
listener.onWindowFocusChanged(hasFocus);
}
}
@Override
public void addOnSaveStateListener(@NonNull OnSaveInstanceStateListener listener) {
onSaveInstanceStateListeners.add(listener);
}
@Override
public void removeOnSaveStateListener(@NonNull OnSaveInstanceStateListener listener) {
onSaveInstanceStateListeners.remove(listener);
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} has its {@code
* onUserLeaveHint()} method invoked.
*/
void onUserLeaveHint() {
for (io.flutter.plugin.common.PluginRegistry.UserLeaveHintListener listener :
onUserLeaveHintListeners) {
listener.onUserLeaveHint();
}
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} or {@code Fragment}
* has its {@code onSaveInstanceState(Bundle)} method invoked.
*/
void onSaveInstanceState(@NonNull Bundle bundle) {
for (OnSaveInstanceStateListener listener : onSaveInstanceStateListeners) {
listener.onSaveInstanceState(bundle);
}
}
/**
* Invoked by the {@link io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* ActivityPluginBinding} when its associated {@link android.app.Activity} or {@code Fragment}
* has its {@code onCreate(Bundle)} method invoked.
*/
void onRestoreInstanceState(@Nullable Bundle bundle) {
for (OnSaveInstanceStateListener listener : onSaveInstanceStateListeners) {
listener.onRestoreInstanceState(bundle);
}
}
}
  /**
   * {@link ServicePluginBinding} implementation handed to ServiceAware plugins while this engine
   * is attached to a Service. The lifecycle reference is null when no lifecycle was supplied.
   */
  private static class FlutterEngineServicePluginBinding implements ServicePluginBinding {
    @NonNull private final Service service;
    @Nullable private final HiddenLifecycleReference hiddenLifecycleReference;
    @NonNull
    private final Set<ServiceAware.OnModeChangeListener> onModeChangeListeners = new HashSet<>();
    FlutterEngineServicePluginBinding(@NonNull Service service, @Nullable Lifecycle lifecycle) {
      this.service = service;
      hiddenLifecycleReference = lifecycle != null ? new HiddenLifecycleReference(lifecycle) : null;
    }
    @Override
    @NonNull
    public Service getService() {
      return service;
    }
    @Nullable
    @Override
    public Object getLifecycle() {
      return hiddenLifecycleReference;
    }
    @Override
    public void addOnModeChangeListener(@NonNull ServiceAware.OnModeChangeListener listener) {
      onModeChangeListeners.add(listener);
    }
    @Override
    public void removeOnModeChangeListener(@NonNull ServiceAware.OnModeChangeListener listener) {
      onModeChangeListeners.remove(listener);
    }
    // Forwards the Service's move-to-foreground event to all registered listeners.
    void onMoveToForeground() {
      for (ServiceAware.OnModeChangeListener listener : onModeChangeListeners) {
        listener.onMoveToForeground();
      }
    }
    // Forwards the Service's move-to-background event to all registered listeners.
    void onMoveToBackground() {
      for (ServiceAware.OnModeChangeListener listener : onModeChangeListeners) {
        listener.onMoveToBackground();
      }
    }
  }
  /** Immutable {@link BroadcastReceiverPluginBinding} wrapping the attached BroadcastReceiver. */
  private static class FlutterEngineBroadcastReceiverPluginBinding
      implements BroadcastReceiverPluginBinding {
    @NonNull private final BroadcastReceiver broadcastReceiver;
    FlutterEngineBroadcastReceiverPluginBinding(@NonNull BroadcastReceiver broadcastReceiver) {
      this.broadcastReceiver = broadcastReceiver;
    }
    @NonNull
    @Override
    public BroadcastReceiver getBroadcastReceiver() {
      return broadcastReceiver;
    }
  }
/**
 * A {@link ContentProviderPluginBinding} backed by a concrete Android
 * {@link ContentProvider}.
 */
private static class FlutterEngineContentProviderPluginBinding
    implements ContentProviderPluginBinding {
  // The ContentProvider currently attached to the owning FlutterEngine.
  @NonNull private final ContentProvider contentProvider;

  FlutterEngineContentProviderPluginBinding(@NonNull ContentProvider contentProvider) {
    this.contentProvider = contentProvider;
  }

  @NonNull
  @Override
  public ContentProvider getContentProvider() {
    return contentProvider;
  }
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineConnectionRegistry.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineConnectionRegistry.java",
"repo_id": "engine",
"token_count": 12088
} | 331 |
package io.flutter.embedding.engine.mutatorsstack;
import static android.view.View.OnFocusChangeListener;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Path;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.accessibility.AccessibilityEvent;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.embedding.android.AndroidTouchProcessor;
import io.flutter.util.ViewUtils;
/**
 * A view that applies the {@link io.flutter.embedding.engine.mutatorsstack.FlutterMutatorsStack} to
 * its children.
 */
public class FlutterMutatorView extends FrameLayout {
  // The stack of mutations (clips and transforms) to apply to children; set in readyToDisplay.
  private FlutterMutatorsStack mutatorsStack;
  // Logical-to-physical pixel ratio; used in getPlatformViewMatrix to undo the physical scale.
  private float screenDensity;
  // Final offset of this view's frame, set in readyToDisplay.
  private int left;
  private int top;
  // The left/top values captured when the current touch gesture began; see onTouchEvent.
  private int prevLeft;
  private int prevTop;

  // Processes touch events intercepted by this view; may be null (see single-arg constructor).
  private final AndroidTouchProcessor androidTouchProcessor;

  /**
   * Initialize the FlutterMutatorView. Use this to set the screenDensity, which will be used to
   * correct the final transform matrix.
   */
  public FlutterMutatorView(
      @NonNull Context context,
      float screenDensity,
      @Nullable AndroidTouchProcessor androidTouchProcessor) {
    super(context, null);
    this.screenDensity = screenDensity;
    this.androidTouchProcessor = androidTouchProcessor;
  }

  /** Initialize the FlutterMutatorView with a density of 1 and no touch processor. */
  public FlutterMutatorView(@NonNull Context context) {
    this(context, 1, /* androidTouchProcessor=*/ null);
  }

  // The focus listener currently registered on the ViewTreeObserver, if any.
  @Nullable @VisibleForTesting ViewTreeObserver.OnGlobalFocusChangeListener activeFocusListener;

  /**
   * Sets a focus change listener that notifies when the current view or any of its descendant views
   * have received focus.
   *
   * <p>If there's an active focus listener, it will first remove the current listener, and then add
   * the new one.
   *
   * @param userFocusListener A user provided focus listener.
   */
  public void setOnDescendantFocusChangeListener(@NonNull OnFocusChangeListener userFocusListener) {
    unsetOnDescendantFocusChangeListener();

    final View mutatorView = this;
    final ViewTreeObserver observer = getViewTreeObserver();
    if (observer.isAlive() && activeFocusListener == null) {
      activeFocusListener =
          new ViewTreeObserver.OnGlobalFocusChangeListener() {
            @Override
            public void onGlobalFocusChanged(View oldFocus, View newFocus) {
              // Report focus on this view whenever any descendant holds focus.
              userFocusListener.onFocusChange(mutatorView, ViewUtils.childHasFocus(mutatorView));
            }
          };
      observer.addOnGlobalFocusChangeListener(activeFocusListener);
    }
  }

  /** Unsets any active focus listener. */
  public void unsetOnDescendantFocusChangeListener() {
    final ViewTreeObserver observer = getViewTreeObserver();
    if (observer.isAlive() && activeFocusListener != null) {
      final ViewTreeObserver.OnGlobalFocusChangeListener currFocusListener = activeFocusListener;
      activeFocusListener = null;
      observer.removeOnGlobalFocusChangeListener(currFocusListener);
    }
  }

  /**
   * Pass the necessary parameters to the view so it can apply correct mutations to its children.
   */
  public void readyToDisplay(
      @NonNull FlutterMutatorsStack mutatorsStack, int left, int top, int width, int height) {
    this.mutatorsStack = mutatorsStack;
    this.left = left;
    this.top = top;
    FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(width, height);
    layoutParams.leftMargin = left;
    layoutParams.topMargin = top;
    setLayoutParams(layoutParams);
    // Ensure draw() is invoked even though FrameLayout normally skips it.
    setWillNotDraw(false);
  }

  @Override
  public void draw(Canvas canvas) {
    // Apply all clippings on the parent canvas.
    canvas.save();
    for (Path path : mutatorsStack.getFinalClippingPaths()) {
      // Reverse the current offset.
      //
      // The frame of this view includes the final offset of the bounding rect.
      // We need to apply all the mutators to the view, which includes the mutation that leads to
      // the final offset. We should reverse this final offset, both as a translate mutation and to
      // all the clipping paths
      Path pathCopy = new Path(path);
      pathCopy.offset(-left, -top);
      canvas.clipPath(pathCopy);
    }
    super.draw(canvas);
    canvas.restore();
  }

  @Override
  public void dispatchDraw(Canvas canvas) {
    // Apply all the transforms on the child canvas.
    canvas.save();
    canvas.concat(getPlatformViewMatrix());
    super.dispatchDraw(canvas);
    canvas.restore();
  }

  // Computes the transform applied to children: the mutators' final matrix with the screen
  // density scale and this view's own frame offset undone.
  private Matrix getPlatformViewMatrix() {
    Matrix finalMatrix = new Matrix(mutatorsStack.getFinalMatrix());

    // Reverse scale based on screen scale.
    //
    // The Android frame is set based on the logical resolution instead of physical.
    // (https://developer.android.com/training/multiscreen/screendensities).
    // However, flow is based on the physical resolution. For example, 1000 pixels in flow equals
    // 500 points in Android. And until this point, we did all the calculation based on the flow
    // resolution. So we need to scale down to match Android's logical resolution.
    finalMatrix.preScale(1 / screenDensity, 1 / screenDensity);

    // Reverse the current offset.
    //
    // The frame of this view includes the final offset of the bounding rect.
    // We need to apply all the mutators to the view, which includes the mutation that leads to
    // the final offset. We should reverse this final offset, both as a translate mutation and to
    // all the clipping paths
    finalMatrix.postTranslate(-left, -top);

    return finalMatrix;
  }

  /** Intercept the events here and do not propagate them to the child platform views. */
  @Override
  public boolean onInterceptTouchEvent(MotionEvent event) {
    return true;
  }

  @Override
  public boolean requestSendAccessibilityEvent(View child, AccessibilityEvent event) {
    final View embeddedView = getChildAt(0);
    if (embeddedView != null
        && embeddedView.getImportantForAccessibility()
            == View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS) {
      return false;
    }
    // Forward the request only if the embedded view is in the Flutter accessibility tree.
    // The embedded view may be ignored when the framework doesn't populate a SemanticNode
    // for the current platform view.
    // See AccessibilityBridge for more.
    return super.requestSendAccessibilityEvent(child, event);
  }

  // Forwards intercepted touch events to the AndroidTouchProcessor with a matrix mapping view
  // coordinates back to screen coordinates.
  @Override
  @SuppressLint("ClickableViewAccessibility")
  public boolean onTouchEvent(MotionEvent event) {
    if (androidTouchProcessor == null) {
      return super.onTouchEvent(event);
    }

    final Matrix screenMatrix = new Matrix();
    switch (event.getAction()) {
      case MotionEvent.ACTION_DOWN:
        prevLeft = left;
        prevTop = top;
        screenMatrix.postTranslate(left, top);
        break;
      case MotionEvent.ACTION_MOVE:
        // While the view is dragged, use the left and top positions as
        // they were at the moment the touch event fired.
        screenMatrix.postTranslate(prevLeft, prevTop);
        prevLeft = left;
        prevTop = top;
        break;
      case MotionEvent.ACTION_UP:
      default:
        screenMatrix.postTranslate(left, top);
        break;
    }
    return androidTouchProcessor.onTouchEvent(event, screenMatrix);
  }
}
| engine/shell/platform/android/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorView.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorView.java",
"repo_id": "engine",
"token_count": 2390
} | 332 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.plugins.service;
import android.app.Service;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/** Binding that gives {@link ServiceAware} plugins access to an associated {@link Service}. */
public interface ServicePluginBinding {

  /**
   * Returns the {@link Service} that is currently attached to the {@link
   * io.flutter.embedding.engine.FlutterEngine} that owns this {@code ServicePluginBinding}.
   */
  @NonNull
  Service getService();

  /**
   * Returns the {@code Lifecycle} associated with the attached {@code Service}.
   *
   * <p>Use the flutter_plugin_android_lifecycle plugin to turn the returned {@code Object} into a
   * {@code Lifecycle} object. See
   * (https://github.com/flutter/plugins/tree/master/packages/flutter_plugin_android_lifecycle).
   * Flutter plugins that rely on {@code Lifecycle} are forced to use the
   * flutter_plugin_android_lifecycle plugin so that the version of the Android Lifecycle library is
   * exposed to pub, which allows Flutter to manage different versions of the library over time.
   */
  @Nullable
  Object getLifecycle();

  /**
   * Adds the given {@code listener} to be notified when the associated {@link Service} goes from
   * background to foreground, or foreground to background.
   */
  void addOnModeChangeListener(@NonNull ServiceAware.OnModeChangeListener listener);

  /**
   * Removes the given {@code listener}, which was previously added with {@link
   * #addOnModeChangeListener(ServiceAware.OnModeChangeListener)}.
   */
  void removeOnModeChangeListener(@NonNull ServiceAware.OnModeChangeListener listener);
}
| engine/shell/platform/android/io/flutter/embedding/engine/plugins/service/ServicePluginBinding.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/plugins/service/ServicePluginBinding.java",
"repo_id": "engine",
"token_count": 514
} | 333 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.systemchannels;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import io.flutter.Log;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.plugin.common.JSONMethodCodec;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import java.util.HashMap;
import java.util.Map;
/**
 * System channel that carries navigation requests from the Android embedding to the Flutter
 * framework over the {@code flutter/navigation} method channel: setting the initial route,
 * pushing and popping routes, and forwarding route information.
 */
public class NavigationChannel {
  private static final String TAG = "NavigationChannel";

  @NonNull public final MethodChannel channel;

  public NavigationChannel(@NonNull DartExecutor dartExecutor) {
    this.channel = new MethodChannel(dartExecutor, "flutter/navigation", JSONMethodCodec.INSTANCE);
    channel.setMethodCallHandler(defaultHandler);
  }

  // Provide a default handler that returns an empty response to any messages
  // on this channel.
  private final MethodChannel.MethodCallHandler defaultHandler =
      new MethodChannel.MethodCallHandler() {
        @Override
        public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) {
          result.success(null);
        }
      };

  /** Sends a {@code setInitialRoute} message carrying the given route. */
  public void setInitialRoute(@NonNull String initialRoute) {
    Log.v(TAG, "Sending message to set initial route to '" + initialRoute + "'");
    channel.invokeMethod("setInitialRoute", initialRoute);
  }

  /** Sends a {@code pushRoute} message carrying the given route. */
  public void pushRoute(@NonNull String route) {
    Log.v(TAG, "Sending message to push route '" + route + "'");
    channel.invokeMethod("pushRoute", route);
  }

  /** Sends a {@code pushRouteInformation} message with the route as its {@code location}. */
  public void pushRouteInformation(@NonNull String route) {
    Log.v(TAG, "Sending message to push route information '" + route + "'");
    Map<String, String> message = new HashMap<>();
    message.put("location", route);
    channel.invokeMethod("pushRouteInformation", message);
  }

  /** Sends a {@code popRoute} message. */
  public void popRoute() {
    Log.v(TAG, "Sending message to pop route.");
    channel.invokeMethod("popRoute", null);
  }

  /** Replaces the handler invoked for messages arriving on this channel. */
  public void setMethodCallHandler(@Nullable MethodChannel.MethodCallHandler handler) {
    channel.setMethodCallHandler(handler);
  }
}
| engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/NavigationChannel.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/NavigationChannel.java",
"repo_id": "engine",
"token_count": 700
} | 334 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.common;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
/**
* A {@link MessageCodec} using UTF-8 encoded JSON messages.
*
* <p>This codec is guaranteed to be compatible with the corresponding <a
* href="https://api.flutter.dev/flutter/services/JSONMessageCodec-class.html">JSONMessageCodec</a>
* on the Dart side. These parts of the Flutter SDK are evolved synchronously.
*
* <p>Supports the same Java values as {@link JSONObject#wrap(Object)}.
*
* <p>On the Dart side, JSON messages are handled by the JSON facilities of the <a
* href="https://api.dartlang.org/stable/dart-convert/JSON-constant.html">dart:convert</a> package.
*/
public final class JSONMessageCodec implements MessageCodec<Object> {
  // This codec must match the Dart codec of the same name in package flutter/services.
  public static final JSONMessageCodec INSTANCE = new JSONMessageCodec();

  private JSONMessageCodec() {}

  @Override
  @Nullable
  public ByteBuffer encodeMessage(@Nullable Object message) {
    if (message == null) {
      return null;
    }
    final Object wrapped = JSONUtil.wrap(message);
    // Bare strings must be quoted/escaped so the output is valid JSON text.
    final String json =
        (wrapped instanceof String) ? JSONObject.quote((String) wrapped) : wrapped.toString();
    return StringCodec.INSTANCE.encodeMessage(json);
  }

  @Override
  @Nullable
  public Object decodeMessage(@Nullable ByteBuffer message) {
    if (message == null) {
      return null;
    }
    final String json = StringCodec.INSTANCE.decodeMessage(message);
    try {
      final JSONTokener tokener = new JSONTokener(json);
      final Object value = tokener.nextValue();
      // A valid message is exactly one JSON value; trailing content is rejected.
      if (tokener.more()) {
        throw new IllegalArgumentException("Invalid JSON");
      }
      return value;
    } catch (JSONException e) {
      throw new IllegalArgumentException("Invalid JSON", e);
    }
  }
}
| engine/shell/platform/android/io/flutter/plugin/common/JSONMessageCodec.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/common/JSONMessageCodec.java",
"repo_id": "engine",
"token_count": 699
} | 335 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.editing;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import org.json.JSONException;
import org.json.JSONObject;
/// A representation of the change that occurred to an editing state, along with the resulting
/// composing and selection regions.
public final class TextEditingDelta {
  // The editing state's text before this delta was applied.
  private @NonNull CharSequence oldText;
  // The replacement text of the change ("" for non-text updates).
  private @NonNull CharSequence deltaText;
  // Start/end of the replaced range within oldText; -1/-1 for non-text updates.
  private int deltaStart;
  private int deltaEnd;
  // Selection range after the change was applied.
  private int newSelectionStart;
  private int newSelectionEnd;
  // Composing range after the change was applied.
  private int newComposingStart;
  private int newComposingEnd;

  private static final String TAG = "TextEditingDelta";

  /**
   * Constructs a delta describing the replacement of the range {@code replacementDestinationStart}
   * to {@code replacementDestinationEnd} of {@code oldEditable} with {@code replacementSource},
   * along with the resulting selection and composing regions.
   */
  public TextEditingDelta(
      @NonNull CharSequence oldEditable,
      int replacementDestinationStart,
      int replacementDestinationEnd,
      @NonNull CharSequence replacementSource,
      int selectionStart,
      int selectionEnd,
      int composingStart,
      int composingEnd) {
    newSelectionStart = selectionStart;
    newSelectionEnd = selectionEnd;
    newComposingStart = composingStart;
    newComposingEnd = composingEnd;
    setDeltas(
        oldEditable,
        replacementSource.toString(),
        replacementDestinationStart,
        replacementDestinationEnd);
  }

  // Non text update delta constructor: only the selection/composing regions changed, so the
  // replacement text is empty and the replaced range is sentinel (-1, -1).
  public TextEditingDelta(
      @NonNull CharSequence oldText,
      int selectionStart,
      int selectionEnd,
      int composingStart,
      int composingEnd) {
    newSelectionStart = selectionStart;
    newSelectionEnd = selectionEnd;
    newComposingStart = composingStart;
    newComposingEnd = composingEnd;
    setDeltas(oldText, "", -1, -1);
  }

  @VisibleForTesting
  @NonNull
  public CharSequence getOldText() {
    return oldText;
  }

  @VisibleForTesting
  @NonNull
  public CharSequence getDeltaText() {
    return deltaText;
  }

  @VisibleForTesting
  public int getDeltaStart() {
    return deltaStart;
  }

  @VisibleForTesting
  public int getDeltaEnd() {
    return deltaEnd;
  }

  @VisibleForTesting
  public int getNewSelectionStart() {
    return newSelectionStart;
  }

  @VisibleForTesting
  public int getNewSelectionEnd() {
    return newSelectionEnd;
  }

  @VisibleForTesting
  public int getNewComposingStart() {
    return newComposingStart;
  }

  @VisibleForTesting
  public int getNewComposingEnd() {
    return newComposingEnd;
  }

  // Records the change itself: the prior text, replacement text, and replaced range.
  private void setDeltas(
      @NonNull CharSequence oldText, @NonNull CharSequence newText, int newStart, int newExtent) {
    this.oldText = oldText;
    deltaText = newText;
    deltaStart = newStart;
    deltaEnd = newExtent;
  }

  /**
   * Serializes this delta to a JSON object. Returns a (possibly partially populated) object even
   * if serialization fails; failures are logged rather than thrown.
   */
  @NonNull
  public JSONObject toJSON() {
    JSONObject delta = new JSONObject();

    try {
      delta.put("oldText", oldText.toString());
      delta.put("deltaText", deltaText.toString());
      delta.put("deltaStart", deltaStart);
      delta.put("deltaEnd", deltaEnd);
      delta.put("selectionBase", newSelectionStart);
      delta.put("selectionExtent", newSelectionEnd);
      delta.put("composingBase", newComposingStart);
      delta.put("composingExtent", newComposingEnd);
    } catch (JSONException e) {
      Log.e(TAG, "unable to create JSONObject: " + e);
    }

    return delta;
  }
}
| engine/shell/platform/android/io/flutter/plugin/editing/TextEditingDelta.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/editing/TextEditingDelta.java",
"repo_id": "engine",
"token_count": 1159
} | 336 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.platform;
import android.content.Context;
import android.graphics.Rect;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
/*
 * A view group that implements the same layout protocol that exists between the WindowManager and
 * its direct children.
 *
 * Currently only a subset of the protocol is supported (gravity, x, and y).
 */
class SingleViewFakeWindowViewGroup extends ViewGroup {
  // Used in onLayout to keep the bounds of the current view.
  // We keep it as a member to avoid object allocations during onLayout which are discouraged.
  private final Rect viewBounds;

  // Used in onLayout to keep the bounds of the child views.
  // We keep it as a member to avoid object allocations during onLayout which are discouraged.
  private final Rect childRect;

  public SingleViewFakeWindowViewGroup(Context context) {
    super(context);
    viewBounds = new Rect();
    childRect = new Rect();
  }

  // Positions each child within [l, t, r, b] according to the gravity and x/y offset of its
  // WindowManager.LayoutParams, mirroring how the WindowManager places a window.
  @Override
  protected void onLayout(boolean changed, int l, int t, int r, int b) {
    for (int i = 0; i < getChildCount(); i++) {
      View child = getChildAt(i);
      WindowManager.LayoutParams params = (WindowManager.LayoutParams) child.getLayoutParams();
      viewBounds.set(l, t, r, b);
      Gravity.apply(
          params.gravity,
          child.getMeasuredWidth(),
          child.getMeasuredHeight(),
          viewBounds,
          params.x,
          params.y,
          childRect);
      child.layout(childRect.left, childRect.top, childRect.right, childRect.bottom);
    }
  }

  // Measures each child with an AT_MOST constraint derived from this view's own measure specs.
  @Override
  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    for (int i = 0; i < getChildCount(); i++) {
      View child = getChildAt(i);
      child.measure(atMost(widthMeasureSpec), atMost(heightMeasureSpec));
    }
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
  }

  // Converts a measure spec into one with the same size but AT_MOST mode.
  private static int atMost(int measureSpec) {
    return MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(measureSpec), MeasureSpec.AT_MOST);
  }
}
| engine/shell/platform/android/io/flutter/plugin/platform/SingleViewFakeWindowViewGroup.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/platform/SingleViewFakeWindowViewGroup.java",
"repo_id": "engine",
"token_count": 721
} | 337 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.view;
import androidx.annotation.Keep;
import io.flutter.embedding.engine.FlutterJNI;
/**
 * A class representing information for a callback registered using `PluginUtilities` from
 * `dart:ui`.
 */
@Keep
public final class FlutterCallbackInformation {
  // NOTE(review): field semantics are inferred from their names; instances are produced natively
  // via FlutterJNI.nativeLookupCallbackInformation (the constructor is private).
  public final String callbackName;
  public final String callbackClassName;
  public final String callbackLibraryPath;

  /**
   * Get callback information for a given handle.
   *
   * @param handle the handle for the callback, generated by `PluginUtilities.getCallbackHandle` in
   *     `dart:ui`.
   * @return an instance of FlutterCallbackInformation for the provided handle.
   */
  public static FlutterCallbackInformation lookupCallbackInformation(long handle) {
    return FlutterJNI.nativeLookupCallbackInformation(handle);
  }

  // Private: instances are only created through lookupCallbackInformation / native code.
  private FlutterCallbackInformation(
      String callbackName, String callbackClassName, String callbackLibraryPath) {
    this.callbackName = callbackName;
    this.callbackClassName = callbackClassName;
    this.callbackLibraryPath = callbackLibraryPath;
  }
}
| engine/shell/platform/android/io/flutter/view/FlutterCallbackInformation.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/view/FlutterCallbackInformation.java",
"repo_id": "engine",
"token_count": 338
} | 338 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_PLATFORM_MESSAGE_RESPONSE_ANDROID_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_PLATFORM_MESSAGE_RESPONSE_ANDROID_H_
#include "flutter/fml/macros.h"
#include "flutter/fml/platform/android/jni_weak_ref.h"
#include "flutter/fml/task_runner.h"
#include "flutter/lib/ui/window/platform_message_response.h"
#include "flutter/shell/platform/android/jni/platform_view_android_jni.h"
namespace flutter {
// Completes an Android platform message by delivering the (possibly empty)
// response payload through the JNI facade.
class PlatformMessageResponseAndroid : public flutter::PlatformMessageResponse {
 public:
  // |flutter::PlatformMessageResponse|
  void Complete(std::unique_ptr<fml::Mapping> data) override;

  // |flutter::PlatformMessageResponse|
  void CompleteEmpty() override;

 private:
  // Construction is restricted to fml::MakeRefCounted; see
  // FML_FRIEND_MAKE_REF_COUNTED below.
  PlatformMessageResponseAndroid(
      int response_id,
      std::shared_ptr<PlatformViewAndroidJNI> jni_facade,
      fml::RefPtr<fml::TaskRunner> platform_task_runner);

  ~PlatformMessageResponseAndroid() override;

  // Identifier correlating this response with its pending Java-side message.
  const int response_id_;

  // JNI bridge used to deliver the response to the Android embedding.
  const std::shared_ptr<PlatformViewAndroidJNI> jni_facade_;

  // Platform-thread task runner; presumably used to post completion back to
  // the platform thread -- implementation lives in the .cc file; confirm there.
  const fml::RefPtr<fml::TaskRunner> platform_task_runner_;

  FML_FRIEND_MAKE_REF_COUNTED(PlatformMessageResponseAndroid);
  FML_DISALLOW_COPY_AND_ASSIGN(PlatformMessageResponseAndroid);
};
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_PLATFORM_MESSAGE_RESPONSE_ANDROID_H_
| engine/shell/platform/android/platform_message_response_android.h/0 | {
"file_path": "engine/shell/platform/android/platform_message_response_android.h",
"repo_id": "engine",
"token_count": 521
} | 339 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_SURFACE_ANDROID_SURFACE_MOCK_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_SURFACE_ANDROID_SURFACE_MOCK_H_
#include "flutter/shell/gpu/gpu_surface_gl_skia.h"
#include "flutter/shell/platform/android/surface/android_surface.h"
#include "gmock/gmock.h"
namespace flutter {
//------------------------------------------------------------------------------
/// Mock for |AndroidSurface|. This implementation can be used in unit
/// tests without requiring the Android toolchain.
///
class AndroidSurfaceMock final : public GPUSurfaceGLDelegate,
                                 public AndroidSurface {
 public:
  // Mocked |AndroidSurface| interface; expectations are configured per-test.
  MOCK_METHOD(bool, IsValid, (), (const, override));

  MOCK_METHOD(void, TeardownOnScreenContext, (), (override));

  MOCK_METHOD(std::unique_ptr<Surface>,
              CreateGPUSurface,
              (GrDirectContext * gr_context),
              (override));

  MOCK_METHOD(bool, OnScreenSurfaceResize, (const SkISize& size), (override));

  MOCK_METHOD(bool, ResourceContextMakeCurrent, (), (override));

  MOCK_METHOD(bool, ResourceContextClearCurrent, (), (override));

  MOCK_METHOD(bool,
              SetNativeWindow,
              (fml::RefPtr<AndroidNativeWindow> window),
              (override));

  // The |GPUSurfaceGLDelegate| methods below are real (non-mock) declarations;
  // their definitions live in the corresponding .cc file.

  // |GPUSurfaceGLDelegate|
  std::unique_ptr<GLContextResult> GLContextMakeCurrent() override;

  // |GPUSurfaceGLDelegate|
  bool GLContextClearCurrent() override;

  // |GPUSurfaceGLDelegate|
  bool GLContextPresent(const GLPresentInfo& present_info) override;

  // |GPUSurfaceGLDelegate|
  GLFBOInfo GLContextFBO(GLFrameInfo frame_info) const override;
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_SURFACE_ANDROID_SURFACE_MOCK_H_
| engine/shell/platform/android/surface/android_surface_mock.h/0 | {
"file_path": "engine/shell/platform/android/surface/android_surface_mock.h",
"repo_id": "engine",
"token_count": 687
} | 340 |
package io.flutter.embedding.android;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.HANDLE_DEEPLINKING_META_DATA_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.FlutterInjector;
import io.flutter.embedding.android.FlutterActivityLaunchConfigs.BackgroundMode;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.loader.FlutterLoader;
import io.flutter.plugins.GeneratedPluginRegistrant;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class FlutterFragmentActivityTest {
private final Context ctx = ApplicationProvider.getApplicationContext();
@Before
public void setUp() {
  // Start each test from a clean slate so state from other tests can't leak in.
  FlutterInjector.reset();
  GeneratedPluginRegistrant.clearRegisteredEngines();
  // Stub the JNI layer (reporting as attached) so no native engine is needed in these tests.
  FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
  when(mockFlutterJNI.isAttached()).thenReturn(true);
  FlutterJNI.Factory mockFlutterJNIFactory = mock(FlutterJNI.Factory.class);
  when(mockFlutterJNIFactory.provideFlutterJNI()).thenReturn(mockFlutterJNI);
  FlutterInjector.setInstance(
      new FlutterInjector.Builder().setFlutterJNIFactory(mockFlutterJNIFactory).build());
}
@After
public void tearDown() {
  // Undo the global state installed in setUp so later tests see defaults.
  GeneratedPluginRegistrant.clearRegisteredEngines();
  FlutterInjector.reset();
}
// The default (opaque) configuration yields a surface-backed fragment.
@Test
public void createFlutterFragment_defaultRenderModeSurface() {
  final FlutterFragmentActivity activity = new FakeFlutterFragmentActivity();
  assertEquals(activity.createFlutterFragment().getRenderMode(), RenderMode.surface);
}

// A transparent background mode yields a texture-backed fragment.
@Test
public void createFlutterFragment_defaultRenderModeTexture() {
  final FlutterFragmentActivity activity =
      new FakeFlutterFragmentActivity() {
        @Override
        protected BackgroundMode getBackgroundMode() {
          return BackgroundMode.transparent;
        }
      };
  assertEquals(activity.createFlutterFragment().getRenderMode(), RenderMode.texture);
}

// An explicit getRenderMode() override is honored by the created fragment.
@Test
public void createFlutterFragment_customRenderMode() {
  final FlutterFragmentActivity activity =
      new FakeFlutterFragmentActivity() {
        @Override
        protected RenderMode getRenderMode() {
          return RenderMode.texture;
        }
      };
  assertEquals(activity.createFlutterFragment().getRenderMode(), RenderMode.texture);
}

// A custom Dart entrypoint library URI is forwarded to the created fragment.
@Test
public void createFlutterFragment_customDartEntrypointLibraryUri() {
  final FlutterFragmentActivity activity =
      new FakeFlutterFragmentActivity() {
        @Override
        public String getDartEntrypointLibraryUri() {
          return "package:foo/bar.dart";
        }
      };
  assertEquals(
      activity.createFlutterFragment().getDartEntrypointLibraryUri(), "package:foo/bar.dart");
}
// After onCreate, the activity exposes a valid (non-NO_ID) fragment container id.
@Test
public void hasRootLayoutId() {
  FlutterFragmentActivityWithRootLayout activity =
      Robolectric.buildActivity(FlutterFragmentActivityWithRootLayout.class).get();
  activity.onCreate(null);
  assertNotNull(activity.FRAGMENT_CONTAINER_ID);
  assertTrue(activity.FRAGMENT_CONTAINER_ID != View.NO_ID);
}

// Launching the activity registers its engine with the generated plugin registrant exactly once.
@Test
public void itRegistersPluginsAtConfigurationTime() {
  try (ActivityScenario<FlutterFragmentActivity> scenario =
      ActivityScenario.launch(FlutterFragmentActivity.class)) {
    scenario.onActivity(
        activity -> {
          List<FlutterEngine> registeredEngines =
              GeneratedPluginRegistrant.getRegisteredEngines();
          assertEquals(1, registeredEngines.size());
          assertEquals(activity.getFlutterEngine(), registeredEngines.get(0));
        });
  }
}
@Test
public void itDoesNotRegisterPluginsTwiceWhenUsingACachedEngine() {
  try (ActivityScenario<FlutterFragmentActivity> scenario =
      ActivityScenario.launch(FlutterFragmentActivity.class)) {
    scenario.onActivity(
        activity -> {
          List<FlutterEngine> registeredEngines =
              GeneratedPluginRegistrant.getRegisteredEngines();
          assertEquals(1, registeredEngines.size());
          assertEquals(activity.getFlutterEngine(), registeredEngines.get(0));
        });
  }
  // Checked again after the scenario closes: still exactly one registration.
  List<FlutterEngine> registeredEngines = GeneratedPluginRegistrant.getRegisteredEngines();
  // This might cause the plugins to be registered twice, once by the FlutterEngine constructor,
  // and once by the default FlutterFragmentActivity.configureFlutterEngine implementation.
  // Test that it doesn't happen.
  assertEquals(1, registeredEngines.size());
}
// Manifest meta-data explicitly enabling deep linking -> shouldHandleDeeplinking() is true.
@Test
public void itReturnsValueFromMetaDataWhenCallsShouldHandleDeepLinkingCase1()
    throws PackageManager.NameNotFoundException {
  FlutterFragmentActivity activity =
      Robolectric.buildActivity(FlutterFragmentActivityWithProvidedEngine.class).get();
  assertTrue(GeneratedPluginRegistrant.getRegisteredEngines().isEmpty());
  Bundle bundle = new Bundle();
  bundle.putBoolean(HANDLE_DEEPLINKING_META_DATA_KEY, true);
  FlutterFragmentActivity spyFlutterActivity = spy(activity);
  when(spyFlutterActivity.getMetaData()).thenReturn(bundle);
  assertTrue(spyFlutterActivity.shouldHandleDeeplinking());
}

// Manifest meta-data explicitly disabling deep linking -> shouldHandleDeeplinking() is false.
@Test
public void itReturnsValueFromMetaDataWhenCallsShouldHandleDeepLinkingCase2()
    throws PackageManager.NameNotFoundException {
  FlutterFragmentActivity activity =
      Robolectric.buildActivity(FlutterFragmentActivityWithProvidedEngine.class).get();
  assertTrue(GeneratedPluginRegistrant.getRegisteredEngines().isEmpty());
  Bundle bundle = new Bundle();
  bundle.putBoolean(HANDLE_DEEPLINKING_META_DATA_KEY, false);
  FlutterFragmentActivity spyFlutterActivity = spy(activity);
  when(spyFlutterActivity.getMetaData()).thenReturn(bundle);
  assertFalse(spyFlutterActivity.shouldHandleDeeplinking());
}

// No deep-linking meta-data at all -> shouldHandleDeeplinking() defaults to false.
@Test
public void itReturnsValueFromMetaDataWhenCallsShouldHandleDeepLinkingCase3()
    throws PackageManager.NameNotFoundException {
  FlutterFragmentActivity activity =
      Robolectric.buildActivity(FlutterFragmentActivityWithProvidedEngine.class).get();
  assertTrue(GeneratedPluginRegistrant.getRegisteredEngines().isEmpty());
  // Creates an empty bundle.
  Bundle bundle = new Bundle();
  FlutterFragmentActivity spyFlutterActivity = spy(activity);
  when(spyFlutterActivity.getMetaData()).thenReturn(bundle);
  // Empty bundle should return false.
  assertFalse(spyFlutterActivity.shouldHandleDeeplinking());
}
// A subclass-supplied root layout ends up in the activity's content view hierarchy.
@Test
public void itAllowsRootLayoutOverride() {
  FlutterFragmentActivityWithRootLayout activity =
      Robolectric.buildActivity(FlutterFragmentActivityWithRootLayout.class).get();
  activity.onCreate(null);
  ViewGroup contentView = (ViewGroup) activity.findViewById(android.R.id.content);
  boolean foundCustomView = false;
  // Scan android.R.id.content's direct children for the custom root layout.
  for (int i = 0; i < contentView.getChildCount(); i++) {
    foundCustomView =
        contentView.getChildAt(i) instanceof FlutterFragmentActivityWithRootLayout.CustomLayout;
    if (foundCustomView) {
      break;
    }
  }
  assertTrue(foundCustomView);
}
@Test
public void itCreatesAValidFlutterFragment() {
  // Launching via ActivityScenario drives the real lifecycle (onCreate etc.),
  // so the activity builds its fragment and engine for real.
  try (ActivityScenario<FlutterFragmentActivityWithProvidedEngine> scenario =
      ActivityScenario.launch(FlutterFragmentActivityWithProvidedEngine.class)) {
    scenario.onActivity(
        activity -> {
          // The engine exists and provideFlutterEngine ran exactly once.
          assertNotNull(activity.getFlutterEngine());
          assertEquals(1, activity.numberOfEnginesCreated);
        });
  }
}
@Test
public void itRetrievesExistingFlutterFragmentWhenRecreated() {
  FlutterFragmentActivityWithProvidedEngine activity =
      spy(Robolectric.buildActivity(FlutterFragmentActivityWithProvidedEngine.class).get());
  // Pretend the fragment manager restored a fragment from a prior instance of
  // this activity.
  FlutterFragment restoredFragment = mock(FlutterFragment.class);
  when(activity.retrieveExistingFlutterFragmentIfPossible()).thenReturn(restoredFragment);
  FlutterEngine restoredEngine = mock(FlutterEngine.class);
  when(restoredFragment.getFlutterEngine()).thenReturn(restoredEngine);
  activity.onCreate(null);
  // The restored fragment's engine is adopted; no new engine is created.
  assertEquals(restoredEngine, activity.getFlutterEngine());
  assertEquals(0, activity.numberOfEnginesCreated);
}
@Test
public void itHandlesNewFragmentRecreationDuringRestoreWhenActivityIsRecreated() {
  FlutterFragmentActivityWithProvidedEngine activity =
      spy(Robolectric.buildActivity(FlutterFragmentActivityWithProvidedEngine.class).get());
  FlutterFragment fragment = mock(FlutterFragment.class);
  // Similar to the above case, except here, it's not just the activity that was destroyed and
  // could have its fragment restored in the fragment manager. Here, both activity and fragment
  // are destroyed. And the fragment manager recreated the fragment on activity recreate.
  // Mockito consecutive stubbing: the first lookup (before the fragment manager
  // restores state) finds nothing; the second lookup finds the recreated fragment.
  when(activity.retrieveExistingFlutterFragmentIfPossible()).thenReturn(null, fragment);
  FlutterEngine engine = mock(FlutterEngine.class);
  when(fragment.getFlutterEngine()).thenReturn(engine);
  activity.onCreate(null);
  // The framework would have recreated a new fragment but the fragment activity wouldn't have
  // created a new one again.
  assertEquals(0, activity.numberOfEnginesCreated);
}
// Test fixture that supplies its own mock-backed FlutterEngine so tests never
// touch real JNI or the native loader.
static class FlutterFragmentActivityWithProvidedEngine extends FlutterFragmentActivity {
  // Counts calls to provideFlutterEngine; tests assert on this to tell whether
  // an engine was created fresh or reused from a restored fragment.
  int numberOfEnginesCreated = 0;
  @Override
  protected FlutterFragment createFlutterFragment() {
    return FlutterFragment.createDefault();
  }
  @Nullable
  @Override
  public FlutterEngine provideFlutterEngine(@NonNull Context context) {
    // isAttached=true makes the engine believe the JNI bridge is live.
    FlutterJNI flutterJNI = mock(FlutterJNI.class);
    FlutterLoader flutterLoader = mock(FlutterLoader.class);
    when(flutterJNI.isAttached()).thenReturn(true);
    when(flutterLoader.automaticallyRegisterPlugins()).thenReturn(true);
    numberOfEnginesCreated++;
    return new FlutterEngine(context, flutterLoader, flutterJNI, new String[] {}, true);
  }
}
// Minimal concrete activity that stubs out every configuration accessor so
// tests can exercise FlutterFragmentActivity logic without a real manifest.
private static class FakeFlutterFragmentActivity extends FlutterFragmentActivity {
  @Override
  public Intent getIntent() {
    return new Intent();
  }

  @Override
  public String getDartEntrypointFunctionName() {
    return "";
  }

  // @Override added for consistency with the sibling methods: this overrides
  // the accessor declared on FlutterFragmentActivity.
  @Nullable
  @Override
  public String getDartEntrypointLibraryUri() {
    return null;
  }

  @Override
  protected String getInitialRoute() {
    return "";
  }

  @Override
  protected String getAppBundlePath() {
    return "";
  }

  @Override
  protected boolean shouldHandleDeeplinking() {
    return false;
  }
}
// Fixture whose provideRootLayout returns a recognizable FrameLayout subclass,
// used by itAllowsRootLayoutOverride to verify the override is honored.
private static class FlutterFragmentActivityWithRootLayout
    extends FlutterFragmentActivityWithProvidedEngine {
  public static class CustomLayout extends FrameLayout {
    public CustomLayout(Context context) {
      super(context);
    }
  }
  @Override
  protected FrameLayout provideRootLayout(Context context) {
    return new CustomLayout(context);
  }
}
// This is just a compile-time check to ensure that it's possible for
// FlutterFragmentActivity subclasses to provide their own intent builders
// which build their own runtime types. It is intentionally never instantiated.
private static class FlutterFragmentActivityWithIntentBuilders extends FlutterFragmentActivity {
  public static NewEngineIntentBuilder withNewEngine() {
    return new NewEngineIntentBuilder(FlutterFragmentActivityWithIntentBuilders.class);
  }
  public static CachedEngineIntentBuilder withCachedEngine(@NonNull String cachedEngineId) {
    return new CachedEngineIntentBuilder(
        FlutterFragmentActivityWithIntentBuilders.class, cachedEngineId);
  }
  public static NewEngineInGroupIntentBuilder withNewEngineInGroup(
      @NonNull String engineGroupId) {
    return new NewEngineInGroupIntentBuilder(
        FlutterFragmentActivityWithIntentBuilders.class, engineGroupId);
  }
}
}
| engine/shell/platform/android/test/io/flutter/embedding/android/FlutterFragmentActivityTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/android/FlutterFragmentActivityTest.java",
"repo_id": "engine",
"token_count": 4281
} | 341 |
package io.flutter.embedding.engine.dart;
import static junit.framework.TestCase.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.content.res.AssetManager;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.FlutterInjector;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.dart.DartExecutor.DartEntrypoint;
import io.flutter.embedding.engine.loader.FlutterLoader;
import java.nio.ByteBuffer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class DartExecutorTest {
  @Mock FlutterLoader mockFlutterLoader;

  @Before
  public void setUp() {
    // Reset the injector so each test installs its own FlutterLoader.
    FlutterInjector.reset();
    MockitoAnnotations.openMocks(this);
  }

  @Test
  public void itSendsBinaryMessages() {
    // Setup test.
    FlutterJNI fakeFlutterJni = mock(FlutterJNI.class);

    // Create object under test.
    DartExecutor dartExecutor = new DartExecutor(fakeFlutterJni, mock(AssetManager.class));

    // Verify a BinaryMessenger exists.
    assertNotNull(dartExecutor.getBinaryMessenger());

    // Execute the behavior under test.
    ByteBuffer fakeMessage = mock(ByteBuffer.class);
    dartExecutor.getBinaryMessenger().send("fake_channel", fakeMessage);

    // Verify that DartExecutor sent our message to FlutterJNI.
    verify(fakeFlutterJni, times(1))
        .dispatchPlatformMessage(eq("fake_channel"), eq(fakeMessage), anyInt(), anyInt());
  }

  @Test
  public void itNotifiesLowMemoryWarning() {
    FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
    when(mockFlutterJNI.isAttached()).thenReturn(true);
    DartExecutor dartExecutor = new DartExecutor(mockFlutterJNI, mock(AssetManager.class));
    // The warning should be forwarded straight to JNI.
    dartExecutor.notifyLowMemoryWarning();
    verify(mockFlutterJNI, times(1)).notifyLowMemoryWarning();
  }

  @Test
  public void itThrowsWhenCreatingADefaultDartEntrypointWithAnUninitializedFlutterLoader() {
    assertThrows(
        AssertionError.class,
        () -> {
          DartEntrypoint.createDefault();
        });
  }

  @Test
  public void itHasReasonableDefaultsWhenFlutterLoaderIsInitialized() {
    when(mockFlutterLoader.initialized()).thenReturn(true);
    when(mockFlutterLoader.findAppBundlePath()).thenReturn("my/custom/path");
    FlutterInjector.setInstance(
        new FlutterInjector.Builder().setFlutterLoader(mockFlutterLoader).build());
    DartEntrypoint entrypoint = DartEntrypoint.createDefault();
    // Fix: JUnit's assertEquals takes the *expected* value first; the previous
    // argument order produced inverted failure messages.
    assertEquals("my/custom/path", entrypoint.pathToBundle);
    assertEquals("main", entrypoint.dartEntrypointFunctionName);
  }
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/dart/DartExecutorTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/dart/DartExecutorTest.java",
"repo_id": "engine",
"token_count": 1068
} | 342 |
package io.flutter.embedding.engine.systemchannels;
import static io.flutter.Build.API_LEVELS;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import android.annotation.TargetApi;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import java.util.HashMap;
import java.util.Map;
import org.json.JSONException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.annotation.Config;
@Config(
    manifest = Config.NONE,
    shadows = {})
@RunWith(AndroidJUnit4.class)
@TargetApi(API_LEVELS.API_24)
public class RestorationChannelTest {
  @Test
  public void itDoesNotDoAnythingWhenRestorationDataIsSetBeforeFrameworkAsks()
      throws JSONException {
    MethodChannel rawChannel = mock(MethodChannel.class);
    RestorationChannel restorationChannel =
        new RestorationChannel(rawChannel, /*waitForRestorationData=*/ false);
    restorationChannel.setRestorationData("Any String you want".getBytes());
    // Data is held until the framework requests it; nothing is pushed eagerly.
    verify(rawChannel, times(0)).invokeMethod(any(), any());
  }

  @Test
  public void itSendsDataOverWhenRequestIsPending() throws JSONException {
    byte[] data = "Any String you want".getBytes();
    MethodChannel rawChannel = mock(MethodChannel.class);
    RestorationChannel restorationChannel =
        new RestorationChannel(rawChannel, /*waitForRestorationData=*/ true);
    // Capture the handler the channel registered so the test can drive it.
    ArgumentCaptor<MethodChannel.MethodCallHandler> argumentCaptor =
        ArgumentCaptor.forClass(MethodChannel.MethodCallHandler.class);
    verify(rawChannel).setMethodCallHandler(argumentCaptor.capture());
    // A "get" that arrives before the data must not be answered yet.
    MethodChannel.Result result = mock(MethodChannel.Result.class);
    argumentCaptor.getValue().onMethodCall(new MethodCall("get", null), result);
    verifyNoInteractions(result);
    restorationChannel.setRestorationData(data);
    verify(rawChannel, times(0)).invokeMethod(any(), any());
    // The pending "get" is now answered with the data.
    Map<String, Object> expected = new HashMap<>();
    expected.put("enabled", true);
    expected.put("data", data);
    verify(result).success(expected);
    // Next get request is answered right away.
    MethodChannel.Result result2 = mock(MethodChannel.Result.class);
    argumentCaptor.getValue().onMethodCall(new MethodCall("get", null), result2);
    verify(result2).success(expected);
  }

  @Test
  public void itPushesNewData() throws JSONException {
    byte[] data = "Any String you want".getBytes();
    MethodChannel rawChannel = mock(MethodChannel.class);
    RestorationChannel restorationChannel =
        new RestorationChannel(rawChannel, /*waitForRestorationData=*/ false);
    ArgumentCaptor<MethodChannel.MethodCallHandler> argumentCaptor =
        ArgumentCaptor.forClass(MethodChannel.MethodCallHandler.class);
    verify(rawChannel).setMethodCallHandler(argumentCaptor.capture());
    MethodChannel.Result result = mock(MethodChannel.Result.class);
    argumentCaptor.getValue().onMethodCall(new MethodCall("get", null), result);
    Map<String, Object> expected = new HashMap<>();
    expected.put("enabled", true);
    expected.put("data", null);
    verify(result).success(expected);
    restorationChannel.setRestorationData(data);
    // Fix: JUnit's assertEquals takes the *expected* value first; the previous
    // argument order produced inverted failure messages.
    // Data is not considered current until the framework acknowledges the push.
    assertEquals(null, restorationChannel.getRestorationData());
    ArgumentCaptor<MethodChannel.Result> resultCapture =
        ArgumentCaptor.forClass(MethodChannel.Result.class);
    Map<String, Object> expected2 = new HashMap<>();
    expected2.put("enabled", true);
    expected2.put("data", data);
    verify(rawChannel).invokeMethod(eq("push"), eq(expected2), resultCapture.capture());
    resultCapture.getValue().success(null);
    assertEquals(data, restorationChannel.getRestorationData());
  }

  @Test
  public void itHoldsOnToDataFromFramework() throws JSONException {
    byte[] data = "Any String you want".getBytes();
    MethodChannel rawChannel = mock(MethodChannel.class);
    RestorationChannel restorationChannel =
        new RestorationChannel(rawChannel, /*waitForRestorationData=*/ false);
    ArgumentCaptor<MethodChannel.MethodCallHandler> argumentCaptor =
        ArgumentCaptor.forClass(MethodChannel.MethodCallHandler.class);
    verify(rawChannel).setMethodCallHandler(argumentCaptor.capture());
    // A "put" from the framework becomes the channel's current data.
    MethodChannel.Result result = mock(MethodChannel.Result.class);
    argumentCaptor.getValue().onMethodCall(new MethodCall("put", data), result);
    assertEquals(data, restorationChannel.getRestorationData());
  }
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/RestorationChannelTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/RestorationChannelTest.java",
"repo_id": "engine",
"token_count": 1483
} | 343 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.platform;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.view.AccessibilityBridge;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public class AccessibilityEventsDelegateTest {
  // Fix: test method names previously misspelled "accessibility" as
  // "acessibility". JUnit discovers tests via @Test, so renaming is safe.
  @Test
  public void accessibilityEventsDelegate_forwardsAccessibilityEvents() {
    final AccessibilityBridge mockAccessibilityBridge = mock(AccessibilityBridge.class);
    final View embeddedView = mock(View.class);
    final View originView = mock(View.class);
    final AccessibilityEvent event = mock(AccessibilityEvent.class);
    AccessibilityEventsDelegate delegate = new AccessibilityEventsDelegate();
    delegate.setAccessibilityBridge(mockAccessibilityBridge);
    when(mockAccessibilityBridge.externalViewRequestSendAccessibilityEvent(any(), any(), any()))
        .thenReturn(true);
    final boolean handled = delegate.requestSendAccessibilityEvent(embeddedView, originView, event);
    // The event is forwarded to the bridge and its result is propagated.
    assertTrue(handled);
    verify(mockAccessibilityBridge, times(1))
        .externalViewRequestSendAccessibilityEvent(embeddedView, originView, event);
  }

  @Test
  public void accessibilityEventsDelegate_withoutBridge_noopsAccessibilityEvents() {
    final View embeddedView = mock(View.class);
    final View originView = mock(View.class);
    final AccessibilityEvent event = mock(AccessibilityEvent.class);
    // No bridge installed: the delegate must report the event as unhandled.
    AccessibilityEventsDelegate delegate = new AccessibilityEventsDelegate();
    final boolean handled = delegate.requestSendAccessibilityEvent(embeddedView, originView, event);
    assertFalse(handled);
  }

  @Test
  public void accessibilityEventsDelegate_forwardsHoverEvents() {
    final AccessibilityBridge mockAccessibilityBridge = mock(AccessibilityBridge.class);
    final MotionEvent event = mock(MotionEvent.class);
    AccessibilityEventsDelegate delegate = new AccessibilityEventsDelegate();
    delegate.setAccessibilityBridge(mockAccessibilityBridge);
    when(mockAccessibilityBridge.onAccessibilityHoverEvent(any(), anyBoolean())).thenReturn(true);
    final boolean handled = delegate.onAccessibilityHoverEvent(event, true);
    assertTrue(handled);
    verify(mockAccessibilityBridge, times(1)).onAccessibilityHoverEvent(event, true);
  }

  @Test
  public void accessibilityEventsDelegate_withoutBridge_noopsHoverEvents() {
    final MotionEvent event = mock(MotionEvent.class);
    // No bridge installed: hover events must be reported as unhandled.
    AccessibilityEventsDelegate delegate = new AccessibilityEventsDelegate();
    final boolean handled = delegate.onAccessibilityHoverEvent(event, true);
    assertFalse(handled);
  }
}
| engine/shell/platform/android/test/io/flutter/plugin/platform/AccessibilityEventsDelegateTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/plugin/platform/AccessibilityEventsDelegateTest.java",
"repo_id": "engine",
"token_count": 938
} | 344 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.view.View;
import android.view.ViewGroup;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class ViewUtilsTest {
  @Test
  public void canGetActivity() {
    // A context that is not an Activity (and wraps none) yields null.
    // Idiom fix: use assertNull instead of assertEquals(null, ...).
    Context nonActivityContext = mock(Context.class);
    assertNull(ViewUtils.getActivity(nonActivityContext));

    Activity activity = mock(Activity.class);
    assertEquals(activity, ViewUtils.getActivity(activity));

    // The activity is found even when nested inside multiple ContextWrappers.
    ContextWrapper wrapper = new ContextWrapper(new ContextWrapper(activity));
    assertEquals(activity, ViewUtils.getActivity(wrapper));
  }

  @Test
  public void childHasFocus_rootHasFocus() {
    final View rootView = mock(View.class);
    when(rootView.hasFocus()).thenReturn(true);
    assertTrue(ViewUtils.childHasFocus(rootView));
  }

  @Test
  public void childHasFocus_rootDoesNotHaveFocus() {
    // A plain (non-ViewGroup) view with no focus has no children to search.
    final View rootView = mock(View.class);
    when(rootView.hasFocus()).thenReturn(false);
    assertFalse(ViewUtils.childHasFocus(rootView));
  }

  @Test
  public void childHasFocus_rootIsNull() {
    assertFalse(ViewUtils.childHasFocus(null));
  }

  @Test
  public void childHasFocus_childHasFocus() {
    // Focus on a nested child should be detected through the hierarchy walk.
    final View childView = mock(View.class);
    when(childView.hasFocus()).thenReturn(true);
    final ViewGroup rootView = mock(ViewGroup.class);
    when(rootView.getChildCount()).thenReturn(1);
    when(rootView.getChildAt(0)).thenReturn(childView);
    assertTrue(ViewUtils.childHasFocus(rootView));
  }

  @Test
  public void childHasFocus_childDoesNotHaveFocus() {
    final View childView = mock(View.class);
    when(childView.hasFocus()).thenReturn(false);
    final ViewGroup rootView = mock(ViewGroup.class);
    when(rootView.getChildCount()).thenReturn(1);
    when(rootView.getChildAt(0)).thenReturn(childView);
    assertFalse(ViewUtils.childHasFocus(rootView));
  }
}
| engine/shell/platform/android/test/io/flutter/util/ViewUtilsTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/util/ViewUtilsTest.java",
"repo_id": "engine",
"token_count": 832
} | 345 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_APP_LIFECYCLE_STATE_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_APP_LIFECYCLE_STATE_H_

namespace flutter {

/**
 * These constants describe the possible lifecycle states of the application.
 * They must be kept up to date with changes in the framework's
 * AppLifecycleState enum. They are passed to the embedder's |SetLifecycleState|
 * function.
 *
 * States not supported on a platform will be synthesized by the framework when
 * transitioning between states which are supported, so that all implementations
 * share the same state machine.
 *
 * Here is the state machine:
 *
 *     +-----------+                               +-----------+
 *     | detached  |------------------------------>|  resumed  |
 *     +-----------+                               +-----------+
 *          ^                                              ^
 *          |                                              |
 *          |                                              v
 *     +-----------+        +--------------+       +-----------+
 *     | paused    |<------>|    hidden    |<----->| inactive  |
 *     +-----------+        +--------------+       +-----------+
 */
enum class AppLifecycleState {
  /**
   * Corresponds to the Framework's AppLifecycleState.detached: The initial
   * state of the state machine. On Android, iOS, and web, also the final state
   * of the state machine when all views are detached. Other platforms do not
   * re-enter this state after initially leaving it.
   */
  kDetached,

  /**
   * Corresponds to the Framework's AppLifecycleState.resumed: The nominal
   * "running" state of the application. The application is visible, has input
   * focus, and is running.
   */
  kResumed,

  /**
   * Corresponds to the Framework's AppLifecycleState.inactive: At least one
   * view of the application is visible, but none have input focus. The
   * application is otherwise running normally.
   */
  kInactive,

  /**
   * Corresponds to the Framework's AppLifecycleState.hidden: All views of an
   * application are hidden, either because the application is being stopped (on
   * iOS and Android), or because it is being minimized or on a desktop that is
   * no longer visible (on desktop), or on a tab that is no longer visible (on
   * web).
   */
  kHidden,

  /**
   * Corresponds to the Framework's AppLifecycleState.paused: The application is
   * not running, and can be detached or started again at any time. This state
   * is typically only entered into on iOS and Android.
   */
  kPaused,
};

// Returns the string form of |state| (e.g. "AppLifecycleState.resumed") used
// when communicating lifecycle changes to the framework.
constexpr const char* AppLifecycleStateToString(AppLifecycleState state) {
  switch (state) {
    case AppLifecycleState::kDetached:
      return "AppLifecycleState.detached";
    case AppLifecycleState::kResumed:
      return "AppLifecycleState.resumed";
    case AppLifecycleState::kInactive:
      return "AppLifecycleState.inactive";
    case AppLifecycleState::kHidden:
      return "AppLifecycleState.hidden";
    case AppLifecycleState::kPaused:
      return "AppLifecycleState.paused";
  }
  // Unreachable for valid enum values (the switch above is exhaustive), but
  // falling off the end of a non-void function is undefined behavior if an
  // out-of-range integer is ever cast to AppLifecycleState. Return a benign
  // fallback instead.
  return "AppLifecycleState.detached";
}

}  // namespace flutter

#endif  // FLUTTER_SHELL_PLATFORM_COMMON_APP_LIFECYCLE_STATE_H_
| engine/shell/platform/common/app_lifecycle_state.h/0 | {
"file_path": "engine/shell/platform/common/app_lifecycle_state.h",
"repo_id": "engine",
"token_count": 1211
} | 346 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_
#include <iostream>
#include <memory>
#include <string>
#include "binary_messenger.h"
#include "engine_method_result.h"
#include "event_sink.h"
#include "event_stream_handler.h"
namespace flutter {
class EncodableValue;
// A named channel for communicating with the Flutter application using
// asynchronous event streams. Incoming requests for event stream setup are
// decoded from binary on receipt, and C++ responses and events are encoded into
// binary before being transmitted back to Flutter. The MethodCodec used must be
// compatible with the one used by the Flutter application. This can be achieved
// by creating an EventChannel
// ("https://api.flutter.dev/flutter/services/EventChannel-class.html")
// counterpart of this channel on the Dart side.
// The C++ type of stream configuration arguments, events, and error details are
// templated, but only values supported by the specified MethodCodec can be
// used.
template <typename T = EncodableValue>
class EventChannel {
 public:
  // Creates an instance that sends and receives event handler on the channel
  // named |name|, encoded with |codec| and dispatched via |messenger|.
  EventChannel(BinaryMessenger* messenger,
               const std::string& name,
               const MethodCodec<T>* codec)
      : messenger_(messenger), name_(name), codec_(codec) {}

  ~EventChannel() = default;

  // Prevent copying.
  EventChannel(EventChannel const&) = delete;
  EventChannel& operator=(EventChannel const&) = delete;

  // Registers a stream handler on this channel.
  // If no handler has been registered, any incoming stream setup requests will
  // be handled silently by providing an empty stream.
  //
  // Note that the EventChannel does not own the handler and will not
  // unregister it on destruction. The caller is responsible for unregistering
  // the handler if it should no longer be called.
  void SetStreamHandler(std::unique_ptr<StreamHandler<T>> handler) {
    // A null handler unregisters the channel entirely.
    if (!handler) {
      messenger_->SetMessageHandler(name_, nullptr);
      return;
    }

    // std::function requires a copyable lambda, so convert to a shared pointer.
    // This is safe since only one copy of the shared_pointer will ever be
    // accessed.
    std::shared_ptr<StreamHandler<T>> shared_handler(handler.release());
    // Copy the members into the lambda so the handler keeps working even if
    // this EventChannel object is destroyed.
    const MethodCodec<T>* codec = codec_;
    const std::string channel_name = name_;
    const BinaryMessenger* messenger = messenger_;
    BinaryMessageHandler binary_handler =
        [shared_handler, codec, channel_name, messenger,
         // Mutable state to track the handler's listening status.
         is_listening = bool(false)](const uint8_t* message,
                                     const size_t message_size,
                                     const BinaryReply& reply) mutable {
          constexpr char kOnListenMethod[] = "listen";
          constexpr char kOnCancelMethod[] = "cancel";

          // Stream setup/teardown arrives as method calls named
          // "listen"/"cancel", mirroring Dart's EventChannel protocol.
          std::unique_ptr<MethodCall<T>> method_call =
              codec->DecodeMethodCall(message, message_size);
          if (!method_call) {
            std::cerr
                << "Unable to construct method call from message on channel: "
                << channel_name << std::endl;
            reply(nullptr, 0);
            return;
          }

          const std::string& method = method_call->method_name();
          if (method.compare(kOnListenMethod) == 0) {
            // A re-"listen" while already listening cancels the existing
            // stream before setting up the new one.
            if (is_listening) {
              std::unique_ptr<StreamHandlerError<T>> error =
                  shared_handler->OnCancel(nullptr);
              if (error) {
                std::cerr << "Failed to cancel existing stream: "
                          << (error->error_code) << ", "
                          << (error->error_message) << ", "
                          << (error->error_details);
              }
            }
            is_listening = true;

            std::unique_ptr<std::vector<uint8_t>> result;
            auto sink = std::make_unique<EventSinkImplementation>(
                messenger, channel_name, codec);
            std::unique_ptr<StreamHandlerError<T>> error =
                shared_handler->OnListen(method_call->arguments(),
                                         std::move(sink));
            // Reply with an error envelope if OnListen failed, otherwise with
            // a success envelope to confirm the stream is active.
            if (error) {
              result = codec->EncodeErrorEnvelope(error->error_code,
                                                  error->error_message,
                                                  error->error_details.get());
            } else {
              result = codec->EncodeSuccessEnvelope();
            }
            reply(result->data(), result->size());
          } else if (method.compare(kOnCancelMethod) == 0) {
            std::unique_ptr<std::vector<uint8_t>> result;
            if (is_listening) {
              std::unique_ptr<StreamHandlerError<T>> error =
                  shared_handler->OnCancel(method_call->arguments());
              if (error) {
                result = codec->EncodeErrorEnvelope(error->error_code,
                                                    error->error_message,
                                                    error->error_details.get());
              } else {
                result = codec->EncodeSuccessEnvelope();
              }
              is_listening = false;
            } else {
              // "cancel" without an active stream is reported as an error.
              result = codec->EncodeErrorEnvelope(
                  "error", "No active stream to cancel", nullptr);
            }
            reply(result->data(), result->size());
          } else {
            // Unknown method: an empty reply signals "not handled".
            reply(nullptr, 0);
          }
        };
    messenger_->SetMessageHandler(name_, std::move(binary_handler));
  }

 private:
  // EventSink given to the stream handler; encodes events/errors with the
  // channel's codec and sends them over the channel's BinaryMessenger.
  class EventSinkImplementation : public EventSink<T> {
   public:
    EventSinkImplementation(const BinaryMessenger* messenger,
                            const std::string& name,
                            const MethodCodec<T>* codec)
        : messenger_(messenger), name_(name), codec_(codec) {}
    ~EventSinkImplementation() = default;

    // Prevent copying.
    EventSinkImplementation(EventSinkImplementation const&) = delete;
    EventSinkImplementation& operator=(EventSinkImplementation const&) = delete;

   private:
    const BinaryMessenger* messenger_;
    const std::string name_;
    const MethodCodec<T>* codec_;

   protected:
    void SuccessInternal(const T* event = nullptr) override {
      auto result = codec_->EncodeSuccessEnvelope(event);
      messenger_->Send(name_, result->data(), result->size());
    }

    void ErrorInternal(const std::string& error_code,
                       const std::string& error_message,
                       const T* error_details) override {
      auto result =
          codec_->EncodeErrorEnvelope(error_code, error_message, error_details);
      messenger_->Send(name_, result->data(), result->size());
    }

    // A message of size zero signals end-of-stream to the Dart side.
    void EndOfStreamInternal() override { messenger_->Send(name_, nullptr, 0); }
  };

  BinaryMessenger* messenger_;
  const std::string name_;
  const MethodCodec<T>* codec_;
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_
| engine/shell/platform/common/client_wrapper/include/flutter/event_channel.h/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/include/flutter/event_channel.h",
"repo_id": "engine",
"token_count": 3135
} | 347 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/common/client_wrapper/include/flutter/method_call.h"
#include <memory>
#include <string>
#include "gtest/gtest.h"
namespace flutter {
TEST(MethodCallTest, Basic) {
  // The name and argument supplied at construction must be exposed unchanged
  // through the accessors.
  const std::string name("method_name");
  const int expected_argument = 42;
  MethodCall<int> call(name, std::make_unique<int>(expected_argument));
  EXPECT_EQ(call.method_name(), name);
  ASSERT_NE(call.arguments(), nullptr);
  EXPECT_EQ(*call.arguments(), expected_argument);
}
} // namespace flutter
| engine/shell/platform/common/client_wrapper/method_call_unittests.cc/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/method_call_unittests.cc",
"repo_id": "engine",
"token_count": 226
} | 348 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_ENGINE_SWITCHES_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_ENGINE_SWITCHES_H_
#include <string>
#include <vector>
namespace flutter {

// Returns an array of engine switches suitable to pass to the embedder API
// in FlutterProjectArgs, based on parsing variables from the environment in
// the form:
//   FLUTTER_ENGINE_SWITCHES=<count>
//   FLUTTER_ENGINE_SWITCH_1=...
//   FLUTTER_ENGINE_SWITCH_2=...
//   ...
// Values should match those in shell/common/switches.h
//
// The returned array does not include the initial dummy argument expected by
// the embedder API, so command_line_argv should not be set directly from it.
//
// In release mode, not all switches from the environment will necessarily be
// returned. See the implementation for details.
std::vector<std::string> GetSwitchesFromEnvironment();

} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_ENGINE_SWITCHES_H_
| engine/shell/platform/common/engine_switches.h/0 | {
"file_path": "engine/shell/platform/common/engine_switches.h",
"repo_id": "engine",
"token_count": 343
} | 349 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/common/path_utils.h"
#if defined(_WIN32)
#include <windows.h>
#elif defined(__linux__)
#include <linux/limits.h>
#include <unistd.h>
#endif
namespace flutter {
// Returns the directory containing the running executable (with a trailing
// separator), or an empty path if it cannot be determined on this platform.
std::filesystem::path GetExecutableDirectory() {
#if defined(_WIN32)
  wchar_t buffer[MAX_PATH];
  // GetModuleFileName returns 0 on failure, and a value equal to the buffer
  // size when the path was truncated to fit; treat both as failure.
  DWORD length = GetModuleFileName(nullptr, buffer, MAX_PATH);
  if (length == 0 || length == MAX_PATH) {
    return std::filesystem::path();
  }
  std::filesystem::path executable_path(buffer);
  return executable_path.remove_filename();
#elif defined(__linux__)
  char buffer[PATH_MAX + 1];
  // readlink returns -1 on error and does not null-terminate the buffer, so
  // the length must be validated before constructing the string. The previous
  // `length > PATH_MAX` check let -1 through, producing std::string(buffer, -1)
  // which is undefined behavior.
  ssize_t length = readlink("/proc/self/exe", buffer, sizeof(buffer));
  if (length < 0 || length > PATH_MAX) {
    return std::filesystem::path();
  }
  std::filesystem::path executable_path(std::string(buffer, length));
  return executable_path.remove_filename();
#else
  return std::filesystem::path();
#endif
}
} // namespace flutter
| engine/shell/platform/common/path_utils.cc/0 | {
"file_path": "engine/shell/platform/common/path_utils.cc",
"repo_id": "engine",
"token_count": 342
} | 350 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/common/text_input_model.h"
#include <limits>
#include <map>
#include <vector>
#include "gtest/gtest.h"
namespace flutter {
// Verifies that SetText stores plain ASCII text verbatim.
TEST(TextInputModel, SetText) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}

// Verifies that SetText round-trips text containing non-BMP code points
// (emoji), which occupy more than one code unit per character.
TEST(TextInputModel, SetTextWideCharacters) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("😄🙃🤪🧐");
  EXPECT_STREQ(model->GetText().c_str(), "😄🙃🤪🧐");
}

// Verifies that the empty string is accepted.
TEST(TextInputModel, SetTextEmpty) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("");
  EXPECT_STREQ(model->GetText().c_str(), "");
}

// Verifies that a second SetText call fully replaces the previous contents.
TEST(TextInputModel, SetTextReplaceText) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
  model->SetText("");
  EXPECT_STREQ(model->GetText().c_str(), "");
}

// Verifies that replacing the text collapses the selection back to the start
// of the document.
TEST(TextInputModel, SetTextResetsSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(3)));
  EXPECT_EQ(model->selection(), TextRange(3));
  model->SetText("FGHJI");
  EXPECT_EQ(model->selection(), TextRange(0));
}
TEST(TextInputModel, SetSelectionStart) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(0)));
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionComposingStart) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->SetSelection(TextRange(1)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionMiddle) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionComposingMiddle) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionEnd) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(5)));
EXPECT_EQ(model->selection(), TextRange(5));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionComposingEnd) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->SetSelection(TextRange(4)));
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Verifies that a non-collapsed (base != extent) selection can be set.
// Renamed from "SetSelectionWthExtent" to fix the "Wth" typo; update any CI
// test filters that reference the old name.
TEST(TextInputModel, SetSelectionWithExtent) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_EQ(model->selection(), TextRange(1, 4));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Verifies that a non-collapsed selection is rejected while composing.
// Renamed from "SetSelectionWthExtentComposing" to fix the "Wth" typo;
// update any CI test filters that reference the old name.
TEST(TextInputModel, SetSelectionWithExtentComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_FALSE(model->SetSelection(TextRange(1, 4)));
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionReverseExtent) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
EXPECT_EQ(model->selection(), TextRange(4, 1));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionReverseExtentComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_FALSE(model->SetSelection(TextRange(4, 1)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetSelectionOutsideString) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_FALSE(model->SetSelection(TextRange(4, 6)));
EXPECT_FALSE(model->SetSelection(TextRange(5, 6)));
EXPECT_FALSE(model->SetSelection(TextRange(6)));
}
TEST(TextInputModel, SetSelectionOutsideComposingRange) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_FALSE(model->SetSelection(TextRange(0)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_FALSE(model->SetSelection(TextRange(5)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
}
TEST(TextInputModel, SetComposingRangeStart) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(0, 0), 0));
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetComposingRangeMiddle) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(2, 2), 0));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(2));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetComposingRangeEnd) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(5, 5), 0));
EXPECT_EQ(model->selection(), TextRange(5));
EXPECT_EQ(model->composing_range(), TextRange(5));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetComposingRangeWithExtent) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetComposingRangeReverseExtent) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(4, 1));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, SetComposingRangeOutsideString) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_FALSE(model->SetComposingRange(TextRange(4, 6), 0));
EXPECT_FALSE(model->SetComposingRange(TextRange(5, 6), 0));
EXPECT_FALSE(model->SetComposingRange(TextRange(6, 6), 0));
}
// Composing sequence with no initial selection and no text input.
TEST(TextInputModel, CommitComposingNoTextWithNoSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->SetSelection(TextRange(0));
  // Verify no changes on BeginComposing.
  model->BeginComposing();
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
  // Verify no changes on CommitComposing.
  model->CommitComposing();
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
  // Verify no changes on EndComposing.
  model->EndComposing();
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Composing sequence with an initial selection and no text input.
TEST(TextInputModel, CommitComposingNoTextWithSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->SetSelection(TextRange(1, 3));
  // Verify no changes on BeginComposing.
  model->BeginComposing();
  EXPECT_EQ(model->selection(), TextRange(1, 3));
  EXPECT_EQ(model->composing_range(), TextRange(1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
  // Verify no changes on CommitComposing.
  model->CommitComposing();
  EXPECT_EQ(model->selection(), TextRange(1, 3));
  EXPECT_EQ(model->composing_range(), TextRange(1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
  // Verify the composing range is reset on EndComposing; text and selection
  // are unchanged.
  model->EndComposing();
  EXPECT_EQ(model->selection(), TextRange(1, 3));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Composing sequence with no initial selection.
TEST(TextInputModel, CommitComposingTextWithNoSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->SetSelection(TextRange(1));
// Verify no changes on BeginComposing.
model->BeginComposing();
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
// Verify selection base, extent and composing extent increment as text is
// entered. Verify composing base does not change.
model->UpdateComposingText("つ");
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "AつBCDE");
model->UpdateComposingText("つる");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "AつるBCDE");
// Verify that cursor position is set to correct offset from composing base.
model->UpdateComposingText("鶴");
EXPECT_TRUE(model->SetSelection(TextRange(1)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "A鶴BCDE");
// Verify composing base is set to composing extent on commit.
model->CommitComposing();
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(2));
EXPECT_STREQ(model->GetText().c_str(), "A鶴BCDE");
// Verify that further text entry increments the selection base, extent and
// the composing extent. Verify that composing base does not change.
model->UpdateComposingText("が");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(2, 3));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がBCDE");
// Verify composing base is set to composing extent on commit.
model->CommitComposing();
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(3));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がBCDE");
// Verify no changes on EndComposing.
model->EndComposing();
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がBCDE");
}
// Composing sequence with an initial selection.
TEST(TextInputModel, CommitComposingTextWithSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->SetSelection(TextRange(1, 3));
// Verify no changes on BeginComposing.
model->BeginComposing();
EXPECT_EQ(model->selection(), TextRange(1, 3));
EXPECT_EQ(model->composing_range(), TextRange(1));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
// Verify selection is replaced and selection base, extent and composing
// extent increment to the position immediately after the composing text.
// Verify composing base does not change.
model->UpdateComposingText("つ");
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "AつDE");
// Verify that further text entry increments the selection base, extent and
// the composing extent. Verify that composing base does not change.
model->UpdateComposingText("つる");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "AつるDE");
// Verify that cursor position is set to correct offset from composing base.
model->UpdateComposingText("鶴");
EXPECT_TRUE(model->SetSelection(TextRange(1)));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "A鶴DE");
// Verify composing base is set to composing extent on commit.
model->CommitComposing();
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(2));
EXPECT_STREQ(model->GetText().c_str(), "A鶴DE");
// Verify that further text entry increments the selection base, extent and
// the composing extent. Verify that composing base does not change.
model->UpdateComposingText("が");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(2, 3));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がDE");
// Verify composing base is set to composing extent on commit.
model->CommitComposing();
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(3));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がDE");
// Verify no changes on EndComposing.
model->EndComposing();
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A鶴がDE");
}
TEST(TextInputModel, UpdateComposingRemovesLastComposingCharacter) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  model->SetComposingRange(TextRange(1, 2), 1);
  // Replacing the one-character composing region with empty text removes it.
  model->UpdateComposingText("");
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1));
  // Verify the composing character was actually removed from the text. The
  // original test ended with `model->SetText("ACDE");`, which mutates state
  // without asserting anything — clearly an assertion was intended here.
  EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
TEST(TextInputModel, UpdateSelectionWhileComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
model->SetComposingRange(TextRange(4, 5), 1);
model->UpdateComposingText(u"ぴょんぴょん", TextRange(3, 6));
EXPECT_STREQ(model->GetText().c_str(), "ABCDぴょんぴょん");
EXPECT_EQ(model->selection(), TextRange(7, 10));
EXPECT_EQ(model->composing_range(), TextRange(4, 10));
}
TEST(TextInputModel, AddCodePoint) {
auto model = std::make_unique<TextInputModel>();
model->AddCodePoint('A');
model->AddCodePoint('B');
model->AddCodePoint(0x1f604);
model->AddCodePoint('D');
model->AddCodePoint('E');
EXPECT_EQ(model->selection(), TextRange(6));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AB😄DE");
}
TEST(TextInputModel, AddCodePointSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
model->AddCodePoint('x');
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AxE");
}
TEST(TextInputModel, AddCodePointReverseSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
model->AddCodePoint('x');
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AxE");
}
TEST(TextInputModel, AddCodePointSelectionWideCharacter) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
model->AddCodePoint(0x1f604);
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A😄E");
}
TEST(TextInputModel, AddCodePointReverseSelectionWideCharacter) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
model->AddCodePoint(0x1f604);
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A😄E");
}
TEST(TextInputModel, AddText) {
auto model = std::make_unique<TextInputModel>();
model->AddText(u"ABCDE");
model->AddText("😄");
model->AddText("FGHIJ");
EXPECT_EQ(model->selection(), TextRange(12));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE😄FGHIJ");
}
TEST(TextInputModel, AddTextSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
model->AddText("xy");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AxyE");
}
TEST(TextInputModel, AddTextReverseSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
model->AddText("xy");
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AxyE");
}
TEST(TextInputModel, AddTextSelectionWideCharacter) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
model->AddText(u"😄🙃");
EXPECT_EQ(model->selection(), TextRange(5));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A😄🙃E");
}
TEST(TextInputModel, AddTextReverseSelectionWideCharacter) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
model->AddText(u"😄🙃");
EXPECT_EQ(model->selection(), TextRange(5));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "A😄🙃E");
}
TEST(TextInputModel, DeleteStart) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(0)));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "BCDE");
}
TEST(TextInputModel, DeleteMiddle) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteEnd) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(5)));
ASSERT_FALSE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(5));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, DeleteWideCharacters) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("😄🙃🤪🧐");
  // NOTE(review): offsets appear to count UTF-16 code units — each emoji
  // spans two units, so offset 4 sits between 🙃 and 🤪; confirm against
  // TextInputModel's documented offset semantics.
  EXPECT_TRUE(model->SetSelection(TextRange(4)));
  // Delete removes the full two-unit character after the cursor, not one unit.
  ASSERT_TRUE(model->Delete());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "😄🙃🧐");
}
TEST(TextInputModel, DeleteSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AE");
}
TEST(TextInputModel, DeleteReverseSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AE");
}
TEST(TextInputModel, DeleteStartComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
TEST(TextInputModel, DeleteStartReverseComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(4, 1));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(3, 1));
EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
TEST(TextInputModel, DeleteMiddleComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteMiddleReverseComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
ASSERT_TRUE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(3, 1));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteEndComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
ASSERT_FALSE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(1, 4));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, DeleteEndReverseComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
ASSERT_FALSE(model->Delete());
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(4, 1));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, DeleteSurroundingAtCursor) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(0, 1));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteSurroundingAtCursorComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
EXPECT_TRUE(model->DeleteSurrounding(0, 1));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteSurroundingAtCursorAll) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(0, 3));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AB");
}
TEST(TextInputModel, DeleteSurroundingAtCursorAllComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
EXPECT_TRUE(model->DeleteSurrounding(0, 2));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ABE");
}
TEST(TextInputModel, DeleteSurroundingAtCursorGreedy) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(0, 4));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "AB");
}
TEST(TextInputModel, DeleteSurroundingAtCursorGreedyComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
EXPECT_TRUE(model->DeleteSurrounding(0, 4));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ABE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursor) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(-1, 1));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursorComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 2));
EXPECT_TRUE(model->DeleteSurrounding(-1, 1));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursorAll) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(-2, 2));
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "CDE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursorAllComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 2));
EXPECT_TRUE(model->DeleteSurrounding(-2, 2));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ADE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursorGreedy) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(-3, 3));
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "CDE");
}
TEST(TextInputModel, DeleteSurroundingBeforeCursorGreedyComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 2));
EXPECT_TRUE(model->DeleteSurrounding(-3, 3));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ADE");
}
TEST(TextInputModel, DeleteSurroundingAfterCursor) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(1, 1));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCE");
}
TEST(TextInputModel, DeleteSurroundingAfterCursorComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->DeleteSurrounding(1, 1));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 3));
EXPECT_STREQ(model->GetText().c_str(), "ABDE");
}
TEST(TextInputModel, DeleteSurroundingAfterCursorAll) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(1, 2));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABC");
}
TEST(TextInputModel, DeleteSurroundingAfterCursorAllComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->DeleteSurrounding(1, 2));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ABE");
}
TEST(TextInputModel, DeleteSurroundingAfterCursorGreedy) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
EXPECT_TRUE(model->DeleteSurrounding(1, 3));
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABC");
}
TEST(TextInputModel, DeleteSurroundingAfterCursorGreedyComposing) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
model->BeginComposing();
EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
EXPECT_TRUE(model->DeleteSurrounding(1, 3));
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(1, 2));
EXPECT_STREQ(model->GetText().c_str(), "ABE");
}
TEST(TextInputModel, DeleteSurroundingSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2, 3)));
EXPECT_TRUE(model->DeleteSurrounding(0, 1));
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCE");
}
TEST(TextInputModel, DeleteSurroundingReverseSelection) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(4, 3)));
EXPECT_TRUE(model->DeleteSurrounding(0, 1));
EXPECT_EQ(model->selection(), TextRange(3));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCE");
}
TEST(TextInputModel, BackspaceStart) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(0)));
ASSERT_FALSE(model->Backspace());
EXPECT_EQ(model->selection(), TextRange(0));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
TEST(TextInputModel, BackspaceMiddle) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(2)));
ASSERT_TRUE(model->Backspace());
EXPECT_EQ(model->selection(), TextRange(1));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
TEST(TextInputModel, BackspaceEnd) {
auto model = std::make_unique<TextInputModel>();
model->SetText("ABCDE");
EXPECT_TRUE(model->SetSelection(TextRange(5)));
ASSERT_TRUE(model->Backspace());
EXPECT_EQ(model->selection(), TextRange(4));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "ABCD");
}
TEST(TextInputModel, BackspaceWideCharacters) {
auto model = std::make_unique<TextInputModel>();
model->SetText("😄🙃🤪🧐");
EXPECT_TRUE(model->SetSelection(TextRange(4)));
ASSERT_TRUE(model->Backspace());
EXPECT_EQ(model->selection(), TextRange(2));
EXPECT_EQ(model->composing_range(), TextRange(0));
EXPECT_STREQ(model->GetText().c_str(), "😄🤪🧐");
}
TEST(TextInputModel, BackspaceSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  // Exercise Backspace, not Delete: the original test called Delete() —
  // a copy-paste slip that left the Backspace-with-selection path untested.
  // With an active selection both remove the selected range.
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "AE");
}
TEST(TextInputModel, BackspaceReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  // Exercise Backspace, not Delete: the original test called Delete() —
  // a copy-paste slip that left the Backspace-with-reverse-selection path
  // untested. With an active selection both remove the selected range.
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "AE");
}
// --- Backspace() while composing ---------------------------------------------
// With an active composing region, Backspace() is confined to that region:
// at the region's start it deletes nothing and returns false; inside the
// region it deletes one character and shrinks the composing range by one.

// Cursor at the start of the composing region: nothing to delete.
TEST(TextInputModel, BackspaceStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  ASSERT_FALSE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above, with the composing range installed base/extent reversed.
TEST(TextInputModel, BackspaceStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  ASSERT_FALSE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor one character into the composing region: "B" is deleted and the
// composing range shrinks from (1,4) to (1,3).
TEST(TextInputModel, BackspaceMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 3));
  EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
// As above with a reversed composing range; the range stays reversed.
TEST(TextInputModel, BackspaceMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(3, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ACDE");
}
// Cursor at the end of the composing region: "D" is deleted.
TEST(TextInputModel, BackspaceEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(1, 3));
  EXPECT_STREQ(model->GetText().c_str(), "ABCE");
}
// As above with a reversed composing range.
TEST(TextInputModel, BackspaceEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  ASSERT_TRUE(model->Backspace());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(3, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCE");
}
// --- MoveCursorForward() ------------------------------------------------------
// Moves a collapsed cursor one grapheme to the right; returns false when the
// cursor is already at the end of the text (or, while composing, at the end
// of the composing region). With a non-empty selection it collapses the
// selection to its upper end. Text is never modified.

// From the start of the text the cursor advances to offset 1.
TEST(TextInputModel, MoveCursorForwardStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle of the text.
TEST(TextInputModel, MoveCursorForwardMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// At the end of the text there is nowhere to go: returns false, no change.
TEST(TextInputModel, MoveCursorForwardEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_FALSE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Emoji are surrogate pairs in UTF-16: the cursor must advance by two code
// units (4 -> 6), never landing between a pair.
TEST(TextInputModel, MoveCursorForwardWideCharacters) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("😄🙃🤪🧐");
  EXPECT_TRUE(model->SetSelection(TextRange(4)));
  ASSERT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(6));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "😄🙃🤪🧐");
}
// A forward selection collapses to its end (4).
TEST(TextInputModel, MoveCursorForwardSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection collapses to the same upper end (4).
TEST(TextInputModel, MoveCursorForwardReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// While composing, movement stays inside the composing region. Cursor at the
// region's start advances one position.
TEST(TextInputModel, MoveCursorForwardStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(2));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorForwardStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(2));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor in the middle of the composing region.
TEST(TextInputModel, MoveCursorForwardMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorForwardMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor at the end of the composing region: clamped, returns false.
TEST(TextInputModel, MoveCursorForwardEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  EXPECT_FALSE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorForwardEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_FALSE(model->MoveCursorForward());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// --- MoveCursorBack() ---------------------------------------------------------
// Mirror image of MoveCursorForward(): moves a collapsed cursor one grapheme
// left, returns false at the start of the text (or of the composing region),
// and collapses a non-empty selection to its lower end. Text is unchanged.

// At the start of the text: returns false, no change.
TEST(TextInputModel, MoveCursorBackStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_FALSE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle of the text.
TEST(TextInputModel, MoveCursorBackMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the end of the text.
TEST(TextInputModel, MoveCursorBackEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Emoji are UTF-16 surrogate pairs: the cursor retreats two code units.
TEST(TextInputModel, MoveCursorBackWideCharacters) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("😄🙃🤪🧐");
  EXPECT_TRUE(model->SetSelection(TextRange(4)));
  ASSERT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(2));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "😄🙃🤪🧐");
}
// A forward selection collapses to its start (1).
TEST(TextInputModel, MoveCursorBackSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection collapses to the same lower end (1).
TEST(TextInputModel, MoveCursorBackReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor at the start of the composing region: clamped, returns false.
TEST(TextInputModel, MoveCursorBackStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_TRUE(model->SetSelection(TextRange(1)));
  EXPECT_FALSE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorBackStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_TRUE(model->SetSelection(TextRange(1)));
  EXPECT_FALSE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor in the middle of the composing region moves back to its start.
TEST(TextInputModel, MoveCursorBackMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorBackMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor at the end of the composing region moves back one position.
TEST(TextInputModel, MoveCursorBackEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorBackEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_TRUE(model->MoveCursorBack());
  EXPECT_EQ(model->selection(), TextRange(3));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// --- MoveCursorToBeginning() / SelectToBeginning() ----------------------------
// MoveCursorToBeginning() collapses the cursor to the start of the text;
// SelectToBeginning() extends the selection from its current extent back to
// the start. Both return false when the cursor is already there. The two
// variants are interleaved below so each scenario is tested for both.

// Already at the beginning: no-op, returns false.
TEST(TextInputModel, MoveCursorToBeginningStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_FALSE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Already at the beginning: nothing to select, returns false.
TEST(TextInputModel, SelectToBeginningStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_FALSE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle: cursor jumps to 0.
TEST(TextInputModel, MoveCursorToBeginningMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle: selection anchored at 2, extent at 0 (reversed range).
TEST(TextInputModel, SelectToBeginningMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(2, 0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the end of the text.
TEST(TextInputModel, MoveCursorToBeginningEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the end: the whole text becomes selected, anchored at 5.
TEST(TextInputModel, SelectToBeginningEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(5, 0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// An existing selection collapses to 0.
TEST(TextInputModel, MoveCursorToBeginningSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// An existing selection keeps its base (1) and moves its extent to 0.
TEST(TextInputModel, SelectToBeginningSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1, 0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection also collapses to 0.
TEST(TextInputModel, MoveCursorToBeginningReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection keeps its base (4) and moves its extent to 0.
TEST(TextInputModel, SelectToBeginningReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(4, 0));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// --- MoveCursorToBeginning() / SelectToBeginning() while composing ------------
// While composing, "beginning" means the start of the composing region, not
// the start of the text; movement/selection is clamped to the region.

// Cursor already at the composing region's start: no-op, returns false.
TEST(TextInputModel, MoveCursorToBeginningStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_FALSE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Nothing to select when already at the composing region's start.
TEST(TextInputModel, SelectToBeginningStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_FALSE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorToBeginningStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_FALSE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, SelectToBeginningStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_FALSE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor inside the composing region collapses to the region's start (1).
TEST(TextInputModel, MoveCursorToBeginningMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Selection extends from the cursor (2) back to the region's start (1).
TEST(TextInputModel, SelectToBeginningMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(2, 1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorToBeginningMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, SelectToBeginningMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(2, 1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor at the composing region's end collapses to its start (1).
TEST(TextInputModel, MoveCursorToBeginningEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// SelectToBeginning() with the cursor at the end of an active composing
// region: the selection extends backwards from the cursor (4) to the start
// of the composing region (1), not to the start of the text.
TEST(TextInputModel, SelectToBeginningEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  // Was MoveCursorToBeginning() with a collapsed-selection expectation — a
  // copy/paste slip from MoveCursorToBeginningEndComposing above; this test
  // must exercise SelectToBeginning(), mirroring
  // SelectToBeginningEndReverseComposing below.
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(4, 1));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Reverse-composing variants: cursor at the end of a reversed composing
// range; "beginning" is still the region's lower bound (1).
TEST(TextInputModel, MoveCursorToBeginningEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_TRUE(model->MoveCursorToBeginning());
  EXPECT_EQ(model->selection(), TextRange(1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Selection extends from the cursor (4) back to the region's start (1).
TEST(TextInputModel, SelectToBeginningEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_TRUE(model->SelectToBeginning());
  EXPECT_EQ(model->selection(), TextRange(4, 1));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// --- MoveCursorToEnd() / SelectToEnd() ------------------------------------------
// MoveCursorToEnd() collapses the cursor to the end of the text (or of the
// composing region while composing); SelectToEnd() extends the selection
// there. Both return false when the cursor is already at the end.

// From the start of the text the cursor jumps to the end (5).
TEST(TextInputModel, MoveCursorToEndStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the start: the whole text becomes selected (0..5).
TEST(TextInputModel, SelectToEndStart) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(0, 5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle of the text.
TEST(TextInputModel, MoveCursorToEndMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// From the middle: selection anchored at 2, extent at 5.
TEST(TextInputModel, SelectToEndMiddle) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(2)));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(2, 5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Already at the end: no-op, returns false.
TEST(TextInputModel, MoveCursorToEndEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_FALSE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Already at the end: nothing to select, returns false.
TEST(TextInputModel, SelectToEndEnd) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(5)));
  EXPECT_FALSE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// An existing selection collapses to the end of the text.
TEST(TextInputModel, MoveCursorToEndSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// An existing selection keeps its base (1) and moves its extent to 5.
TEST(TextInputModel, SelectToEndSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(1, 5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection also collapses to the end of the text.
TEST(TextInputModel, MoveCursorToEndReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// A reversed selection keeps its base (4) and moves its extent to 5.
TEST(TextInputModel, SelectToEndReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(4, 5));
  EXPECT_EQ(model->composing_range(), TextRange(0));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// While composing, "end" means the end of the composing region (4).
TEST(TextInputModel, MoveCursorToEndStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Selection extends from the composing region's start to its end.
TEST(TextInputModel, SelectToEndStartComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 0));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(1, 4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Reverse-composing variant of MoveCursorToEndStartComposing: the cursor
// moves to the composing region's end (4) and the (reversed) composing range
// is left untouched.
TEST(TextInputModel, MoveCursorToEndStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  // Was TextRange(1, 4): the "ReverseComposing" variants must install the
  // composing range with base/extent reversed — and expect it back reversed —
  // mirroring every other *ReverseComposing test in this file.
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Reverse-composing variant of SelectToEndStartComposing.
TEST(TextInputModel, SelectToEndStartReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  // Was TextRange(1, 4); see note above.
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 0));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(1, 4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor inside the composing region collapses to the region's end (4).
TEST(TextInputModel, MoveCursorToEndMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Selection extends from the cursor (2) to the region's end (4).
TEST(TextInputModel, SelectToEndMiddleComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 1));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(2, 4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorToEndMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, SelectToEndMiddleReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 1));
  EXPECT_TRUE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(2, 4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Cursor already at the composing region's end: no-op, returns false.
TEST(TextInputModel, MoveCursorToEndEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  EXPECT_FALSE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// Nothing to select when already at the composing region's end.
TEST(TextInputModel, SelectToEndEndComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(1, 4), 3));
  EXPECT_FALSE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(1, 4));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, MoveCursorToEndEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_FALSE(model->MoveCursorToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// As above with a reversed composing range.
TEST(TextInputModel, SelectToEndEndReverseComposing) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  model->BeginComposing();
  EXPECT_TRUE(model->SetComposingRange(TextRange(4, 1), 3));
  EXPECT_FALSE(model->SelectToEnd());
  EXPECT_EQ(model->selection(), TextRange(4));
  EXPECT_EQ(model->composing_range(), TextRange(4, 1));
  EXPECT_STREQ(model->GetText().c_str(), "ABCDE");
}
// --- GetCursorOffset() --------------------------------------------------------
// GetCursorOffset() reports the cursor's position measured in UTF-8 bytes:
// stepping over characters that encode to 1, 2, 3 and 4 bytes yields the
// cumulative offsets 0, 1, 3, 6, 10.
TEST(TextInputModel, GetCursorOffset) {
  auto model = std::make_unique<TextInputModel>();
  // These characters take 1, 2, 3 and 4 bytes in UTF-8.
  model->SetText("$¢€𐍈");
  EXPECT_TRUE(model->SetSelection(TextRange(0)));
  EXPECT_EQ(model->GetCursorOffset(), 0);
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->GetCursorOffset(), 1);
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->GetCursorOffset(), 3);
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->GetCursorOffset(), 6);
  EXPECT_TRUE(model->MoveCursorForward());
  EXPECT_EQ(model->GetCursorOffset(), 10);
}
// With a forward selection the cursor sits at the selection's extent (4).
TEST(TextInputModel, GetCursorOffsetSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(1, 4)));
  EXPECT_EQ(model->GetCursorOffset(), 4);
}
// With a reversed selection the cursor sits at the extent, here 1.
TEST(TextInputModel, GetCursorOffsetReverseSelection) {
  auto model = std::make_unique<TextInputModel>();
  model->SetText("ABCDE");
  EXPECT_TRUE(model->SetSelection(TextRange(4, 1)));
  EXPECT_EQ(model->GetCursorOffset(), 1);
}
} // namespace flutter
| engine/shell/platform/common/text_input_model_unittests.cc/0 | {
"file_path": "engine/shell/platform/common/text_input_model_unittests.cc",
"repo_id": "engine",
"token_count": 24166
} | 351 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCODECS_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCODECS_H_
#import <Foundation/Foundation.h>
#import "FlutterMacros.h"
NS_ASSUME_NONNULL_BEGIN
/**
 * A message encoding/decoding mechanism.
 *
 * Implementations convert between an object-graph message representation
 * and its binary (`NSData`) wire form; `encode:`/`decode:` are inverses.
 */
FLUTTER_DARWIN_EXPORT
@protocol FlutterMessageCodec
/**
 * Returns a shared instance of this `FlutterMessageCodec`.
 */
+ (instancetype)sharedInstance;
/**
 * Encodes the specified message into binary.
 *
 * @param message The message.
 * @return The binary encoding, or `nil`, if `message` was `nil`.
 */
- (NSData* _Nullable)encode:(id _Nullable)message;
/**
 * Decodes the specified message from binary.
 *
 * @param message The binary-encoded message.
 * @return The decoded message, or `nil`, if `message` was `nil`.
 */
- (id _Nullable)decode:(NSData* _Nullable)message;
@end
/**
 * A `FlutterMessageCodec` using unencoded binary messages, represented as
 * `NSData` instances.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [BinaryCodec](https://api.flutter.dev/flutter/services/BinaryCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * On the Dart side, messages are represented using `ByteData`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterBinaryCodec : NSObject <FlutterMessageCodec>
@end
/**
 * A `FlutterMessageCodec` using UTF-8 encoded `NSString` messages.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [StringCodec](https://api.flutter.dev/flutter/services/StringCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStringCodec : NSObject <FlutterMessageCodec>
@end
/**
 * A `FlutterMessageCodec` using UTF-8 encoded JSON messages.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [JSONMessageCodec](https://api.flutter.dev/flutter/services/JSONMessageCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * Supports values accepted by `NSJSONSerialization` plus top-level
 * `nil`, `NSNumber`, and `NSString`.
 *
 * On the Dart side, JSON messages are handled by the JSON facilities of the
 * [`dart:convert`](https://api.dartlang.org/stable/dart-convert/JSON-constant.html)
 * package.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterJSONMessageCodec : NSObject <FlutterMessageCodec>
@end
/**
 * A writer of the Flutter standard binary encoding.
 *
 * See `FlutterStandardMessageCodec` for details on the encoding.
 *
 * The encoding is extensible via subclasses overriding `writeValue`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardWriter : NSObject
/**
 * Create a `FlutterStandardWriter` that will write to \p data.
 */
- (instancetype)initWithData:(NSMutableData*)data;
/** Write an 8-bit byte. */
- (void)writeByte:(UInt8)value;
/** Write an array of \p bytes of size \p length. */
- (void)writeBytes:(const void*)bytes length:(NSUInteger)length;
/** Write an array of bytes contained in \p data. */
- (void)writeData:(NSData*)data;
/** Write a 32-bit unsigned integer that represents the \p size of a collection. */
- (void)writeSize:(UInt32)size;
/** Write zero padding until data is aligned with \p alignment. */
- (void)writeAlignment:(UInt8)alignment;
/** Write a string with UTF-8 encoding. */
- (void)writeUTF8:(NSString*)value;
/** Introspects into an object and writes its representation.
 *
 * Supported Data Types:
 * - NSNull
 * - NSNumber
 * - NSString (as UTF-8)
 * - FlutterStandardTypedData
 * - NSArray of supported types
 * - NSDictionary of supported types
 *
 * NSAsserts on failure.
 */
- (void)writeValue:(id)value;
@end
/**
 * A reader of the Flutter standard binary encoding.
 *
 * See `FlutterStandardMessageCodec` for details on the encoding.
 *
 * The encoding is extensible via subclasses overriding `readValueOfType`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardReader : NSObject
/**
 * Create a new `FlutterStandardReader` that reads from \p data.
 */
- (instancetype)initWithData:(NSData*)data;
/** Returns YES when the reader hasn't reached the end of its data. */
- (BOOL)hasMore;
/** Reads a byte value and increments the position. */
- (UInt8)readByte;
/** Reads a sequence of byte values of \p length and increments the position. */
- (void)readBytes:(void*)destination length:(NSUInteger)length;
/** Reads a sequence of byte values of \p length and increments the position. */
- (NSData*)readData:(NSUInteger)length;
/** Reads a 32-bit unsigned integer representing a collection size and increments the position.*/
- (UInt32)readSize;
/** Advances the read position until it is aligned with \p alignment. */
- (void)readAlignment:(UInt8)alignment;
/** Reads a null-terminated string encoded with UTF-8. */
- (NSString*)readUTF8;
/**
 * Reads a byte for `FlutterStandardField` then decodes a value matching that type.
 *
 * See also: -[FlutterStandardWriter writeValue]
 */
- (nullable id)readValue;
/**
 * Decodes a value matching the \p type specified.
 *
 * See also:
 * - `FlutterStandardField`
 * - `-[FlutterStandardWriter writeValue]`
 */
- (nullable id)readValueOfType:(UInt8)type;
@end
/**
 * A factory of compatible reader/writer instances using the Flutter standard
 * binary encoding or extensions thereof.
 *
 * Subclasses may override the factory methods below to vend
 * `FlutterStandardReader`/`FlutterStandardWriter` subclasses implementing an
 * extended encoding.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardReaderWriter : NSObject
/**
 * Create a new `FlutterStandardWriter` for writing to \p data.
 */
- (FlutterStandardWriter*)writerWithData:(NSMutableData*)data;
/**
 * Create a new `FlutterStandardReader` for reading from \p data.
 */
- (FlutterStandardReader*)readerWithData:(NSData*)data;
@end
/**
 * A `FlutterMessageCodec` using the Flutter standard binary encoding.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [StandardMessageCodec](https://api.flutter.dev/flutter/services/StandardMessageCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * Supported messages are acyclic values of these forms:
 *
 * - `nil` or `NSNull`
 * - `NSNumber` (including their representation of Boolean values)
 * - `NSString`
 * - `FlutterStandardTypedData`
 * - `NSArray` of supported values
 * - `NSDictionary` with supported keys and values
 *
 * On the Dart side, these values are represented as follows:
 *
 * - `nil` or `NSNull`: null
 * - `NSNumber`: `bool`, `int`, or `double`, depending on the contained value.
 * - `NSString`: `String`
 * - `FlutterStandardTypedData`: `Uint8List`, `Int32List`, `Int64List`,
 *   `Float32List`, or `Float64List`
 * - `NSArray`: `List`
 * - `NSDictionary`: `Map`
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardMessageCodec : NSObject <FlutterMessageCodec>
/**
 * Create a `FlutterStandardMessageCodec` that will read and write to \p readerWriter.
 */
+ (instancetype)codecWithReaderWriter:(FlutterStandardReaderWriter*)readerWriter;
@end
/**
 * Command object representing a method call on a `FlutterMethodChannel`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterMethodCall : NSObject
/**
 * Creates a method call for invoking the specified named method with the
 * specified arguments.
 *
 * @param method The name of the method to call.
 * @param arguments The arguments value.
 */
+ (instancetype)methodCallWithMethodName:(NSString*)method arguments:(id _Nullable)arguments;
/**
 * The method name.
 */
@property(readonly, nonatomic) NSString* method;
/**
 * The arguments.
 */
@property(readonly, nonatomic, nullable) id arguments;
@end
/**
 * Error object representing an unsuccessful outcome of invoking a method
 * on a `FlutterMethodChannel`, or an error event on a `FlutterEventChannel`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterError : NSObject
/**
 * Creates a `FlutterError` with the specified error code, message, and details.
 *
 * @param code An error code string for programmatic use.
 * @param message A human-readable error message.
 * @param details Custom error details.
 */
+ (instancetype)errorWithCode:(NSString*)code
                      message:(NSString* _Nullable)message
                      details:(id _Nullable)details;
/**
 * The error code.
 */
@property(readonly, nonatomic) NSString* code;
/**
 * The error message.
 */
@property(readonly, nonatomic, nullable) NSString* message;
/**
 * The error details.
 */
@property(readonly, nonatomic, nullable) id details;
@end
/**
 * Type of numeric data items encoded in a `FlutterStandardDataType`.
 *
 * - FlutterStandardDataTypeUInt8: plain bytes
 * - FlutterStandardDataTypeInt32: 32-bit signed integers
 * - FlutterStandardDataTypeInt64: 64-bit signed integers
 * - FlutterStandardDataTypeFloat32: 32-bit floats
 * - FlutterStandardDataTypeFloat64: 64-bit floats
 */
typedef NS_ENUM(NSInteger, FlutterStandardDataType) {
  // NOLINTBEGIN(readability-identifier-naming)
  FlutterStandardDataTypeUInt8,
  FlutterStandardDataTypeInt32,
  FlutterStandardDataTypeInt64,
  FlutterStandardDataTypeFloat32,
  FlutterStandardDataTypeFloat64,
  // NOLINTEND(readability-identifier-naming)
};
/**
 * A byte buffer holding `UInt8`, `SInt32`, `SInt64`, `Float32`, or `Float64`
 * values, used with `FlutterStandardMessageCodec` and
 * `FlutterStandardMethodCodec`.
 *
 * Two's complement encoding is used for signed integers. IEEE754
 * single- and double-precision representations are used for floats. The
 * platform's native endianness is assumed.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardTypedData : NSObject
/**
 * Creates a `FlutterStandardTypedData` which interprets the specified data
 * as plain bytes.
 *
 * @param data the byte data.
 */
+ (instancetype)typedDataWithBytes:(NSData*)data;
/**
 * Creates a `FlutterStandardTypedData` which interprets the specified data
 * as 32-bit signed integers.
 *
 * @param data the byte data. The length must be divisible by 4.
 */
+ (instancetype)typedDataWithInt32:(NSData*)data;
/**
 * Creates a `FlutterStandardTypedData` which interprets the specified data
 * as 64-bit signed integers.
 *
 * @param data the byte data. The length must be divisible by 8.
 */
+ (instancetype)typedDataWithInt64:(NSData*)data;
/**
 * Creates a `FlutterStandardTypedData` which interprets the specified data
 * as 32-bit floats.
 *
 * @param data the byte data. The length must be divisible by 4.
 */
+ (instancetype)typedDataWithFloat32:(NSData*)data;
/**
 * Creates a `FlutterStandardTypedData` which interprets the specified data
 * as 64-bit floats.
 *
 * @param data the byte data. The length must be divisible by 8.
 */
+ (instancetype)typedDataWithFloat64:(NSData*)data;
/**
 * The raw underlying data buffer.
 */
@property(readonly, nonatomic) NSData* data;
/**
 * The type of the encoded values.
 */
@property(readonly, nonatomic, assign) FlutterStandardDataType type;
/**
 * The number of value items encoded.
 */
@property(readonly, nonatomic, assign) UInt32 elementCount;
/**
 * The number of bytes used by the encoding of a single value item.
 */
@property(readonly, nonatomic, assign) UInt8 elementSize;
@end
/**
 * An arbitrarily large integer value, used with `FlutterStandardMessageCodec`
 * and `FlutterStandardMethodCodec`.
 *
 * This class is unavailable; see the `FLUTTER_UNAVAILABLE` message below for
 * the rationale and the suggested replacement.
 */
FLUTTER_DARWIN_EXPORT
FLUTTER_UNAVAILABLE("Unavailable on 2018-08-31. Deprecated on 2018-01-09. "
                    "FlutterStandardBigInteger was needed because the Dart 1.0 int type had no "
                    "size limit. With Dart 2.0, the int type is a fixed-size, 64-bit signed "
                    "integer. If you need to communicate larger integers, use NSString encoding "
                    "instead.")
@interface FlutterStandardBigInteger : NSObject
@end
/**
 * A codec for method calls and enveloped results.
 *
 * Method calls are encoded as binary messages with enough structure that the
 * codec can extract a method name `NSString` and an arguments `NSObject`,
 * possibly `nil`. These data items are used to populate a `FlutterMethodCall`.
 *
 * Result envelopes are encoded as binary messages with enough structure that
 * the codec can determine whether the result was successful or an error. In
 * the former case, the codec can extract the result `NSObject`, possibly `nil`.
 * In the latter case, the codec can extract an error code `NSString`, a
 * human-readable `NSString` error message (possibly `nil`), and a custom
 * error details `NSObject`, possibly `nil`. These data items are used to
 * populate a `FlutterError`.
 */
FLUTTER_DARWIN_EXPORT
@protocol FlutterMethodCodec
/**
 * Provides access to a shared instance of this codec.
 *
 * @return The shared instance.
 */
+ (instancetype)sharedInstance;
/**
 * Encodes the specified method call into binary.
 *
 * @param methodCall The method call. The arguments value
 *     must be supported by this codec.
 * @return The binary encoding.
 */
- (NSData*)encodeMethodCall:(FlutterMethodCall*)methodCall;
/**
 * Decodes the specified method call from binary.
 *
 * @param methodCall The binary-encoded method call to decode.
 * @return The decoded method call.
 */
- (FlutterMethodCall*)decodeMethodCall:(NSData*)methodCall;
/**
 * Encodes the specified successful result into binary.
 *
 * @param result The result. Must be a value supported by this codec.
 * @return The binary encoding.
 */
- (NSData*)encodeSuccessEnvelope:(id _Nullable)result;
/**
 * Encodes the specified error result into binary.
 *
 * @param error The error object. The error details value must be supported
 *     by this codec.
 * @return The binary encoding.
 */
- (NSData*)encodeErrorEnvelope:(FlutterError*)error;
/**
 * Decodes the specified result envelope from binary.
 *
 * @param envelope The binary-encoded result envelope.
 * @return The result value, if the envelope represented a successful result,
 *     or a `FlutterError` instance, if not.
 */
- (id _Nullable)decodeEnvelope:(NSData*)envelope;
@end
/**
 * A `FlutterMethodCodec` using UTF-8 encoded JSON method calls and result
 * envelopes.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [JSONMethodCodec](https://api.flutter.dev/flutter/services/JSONMethodCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * Values supported as method arguments and result payloads are
 * those supported as top-level or leaf values by `FlutterJSONMessageCodec`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterJSONMethodCodec : NSObject <FlutterMethodCodec>
@end
/**
 * A `FlutterMethodCodec` using the Flutter standard binary encoding.
 *
 * This codec is guaranteed to be compatible with the corresponding
 * [StandardMethodCodec](https://api.flutter.dev/flutter/services/StandardMethodCodec-class.html)
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * Values supported as method arguments and result payloads are those supported by
 * `FlutterStandardMessageCodec`.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterStandardMethodCodec : NSObject <FlutterMethodCodec>
/**
 * Create a `FlutterStandardMethodCodec` that will read and write to \p readerWriter.
 */
+ (instancetype)codecWithReaderWriter:(FlutterStandardReaderWriter*)readerWriter;
@end
NS_ASSUME_NONNULL_END
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERCODECS_H_
| engine/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h/0 | {
"file_path": "engine/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h",
"repo_id": "engine",
"token_count": 4669
} | 352 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERTESTUTILS_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERTESTUTILS_H_
#import <Foundation/Foundation.h>
/// Returns YES if invoking \p block throws an Objective-C exception, and NO
/// otherwise.
BOOL FLTThrowsObjcException(dispatch_block_t block);
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_SOURCE_FLUTTERTESTUTILS_H_
| engine/shell/platform/darwin/common/framework/Source/FlutterTestUtils.h/0 | {
"file_path": "engine/shell/platform/darwin/common/framework/Source/FlutterTestUtils.h",
"repo_id": "engine",
"token_count": 205
} | 353 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_
#import <UIKit/UIKit.h>
#import "FlutterMacros.h"
#import "FlutterPlugin.h"
/**
 * `UIApplicationDelegate` subclass for simple apps that want default behavior.
 *
 * This class implements the following behaviors:
 * * Status bar touches are forwarded to the key window's root view
 *   `FlutterViewController`, in order to trigger scroll to top.
 * * Keeps the Flutter connection open in debug mode when the phone screen
 *   locks.
 *
 * App delegates for Flutter applications are *not* required to inherit from
 * this class. Developers of custom app delegate classes should copy and paste
 * code as necessary from FlutterAppDelegate.mm.
 */
FLUTTER_DARWIN_EXPORT
@interface FlutterAppDelegate
    : UIResponder <UIApplicationDelegate, FlutterPluginRegistry, FlutterAppLifeCycleProvider>
/**
 * The application's main window.
 */
@property(strong, nonatomic) UIWindow* window;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_
| engine/shell/platform/darwin/ios/framework/Headers/FlutterAppDelegate.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Headers/FlutterAppDelegate.h",
"repo_id": "engine",
"token_count": 420
} | 354 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCHANNELKEYRESPONDER_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCHANNELKEYRESPONDER_H_
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterKeyPrimaryResponder.h"
#import <UIKit/UIKit.h>
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterChannels.h"
/**
 * A primary responder of |FlutterKeyboardManager| that handles events by
 * sending the raw information through a basic message channel.
 *
 * This class corresponds to the RawKeyboard API in the framework.
 */
@interface FlutterChannelKeyResponder : NSObject <FlutterKeyPrimaryResponder>
/**
 * Create an instance by specifying the basic message channel to use.
 *
 * @param channel The channel used to send raw key events to the framework.
 */
- (nonnull instancetype)initWithChannel:(nonnull FlutterBasicMessageChannel*)channel;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERCHANNELKEYRESPONDER_H_
| engine/shell/platform/darwin/ios/framework/Source/FlutterChannelKeyResponder.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterChannelKeyResponder.h",
"repo_id": "engine",
"token_count": 362
} | 355 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_INTERNAL_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_INTERNAL_H_
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/task_runner.h"
#include "flutter/lib/ui/window/pointer_data_packet.h"
#include "flutter/lib/ui/window/viewport_metrics.h"
#include "flutter/shell/common/platform_view.h"
#include "flutter/shell/common/rasterizer.h"
#include "flutter/shell/common/shell.h"
#include "flutter/shell/platform/embedder/embedder.h"
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterDartProject_Internal.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterIndirectScribbleDelegate.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterRestorationPlugin.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputDelegate.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputPlugin.h"
#import "flutter/shell/platform/darwin/ios/platform_view_ios.h"
NS_ASSUME_NONNULL_BEGIN
/// Notification posted when a FlutterEngine is about to be deallocated.
extern NSString* const kFlutterEngineWillDealloc;
/// Internal (engine-private) API of FlutterEngine, not exposed in the public
/// FlutterEngine.h header.
@interface FlutterEngine () <FlutterViewEngineDelegate>
// Accessor for the underlying shell owned by this engine.
- (flutter::Shell&)shell;
- (void)updateViewportMetrics:(flutter::ViewportMetrics)viewportMetrics;
- (void)dispatchPointerDataPacket:(std::unique_ptr<flutter::PointerDataPacket>)packet;
// Accessors for the shell's task runners.
- (fml::RefPtr<fml::TaskRunner>)platformTaskRunner;
- (fml::RefPtr<fml::TaskRunner>)uiTaskRunner;
- (fml::RefPtr<fml::TaskRunner>)rasterTaskRunner;
- (fml::WeakPtr<flutter::PlatformView>)platformView;
- (flutter::Rasterizer::Screenshot)screenshot:(flutter::Rasterizer::ScreenshotType)type
                                 base64Encode:(bool)base64Encode;
// Accessors for the plugins hosted by this engine.
- (FlutterPlatformPlugin*)platformPlugin;
- (std::shared_ptr<flutter::FlutterPlatformViewsController>&)platformViewsController;
- (FlutterTextInputPlugin*)textInputPlugin;
- (FlutterRestorationPlugin*)restorationPlugin;
// Launches the Dart entrypoint, optionally from a specific library, with the
// given entrypoint arguments.
- (void)launchEngine:(nullable NSString*)entrypoint
          libraryURI:(nullable NSString*)libraryOrNil
      entrypointArgs:(nullable NSArray<NSString*>*)entrypointArgs;
// Creates the underlying flutter::Shell. NOTE(review): return value appears
// to indicate success — confirm against FlutterEngine.mm.
- (BOOL)createShell:(nullable NSString*)entrypoint
         libraryURI:(nullable NSString*)libraryOrNil
       initialRoute:(nullable NSString*)initialRoute;
- (void)attachView;
- (void)notifyLowMemory;
- (flutter::PlatformViewIOS*)iosPlatformView;
- (void)waitForFirstFrame:(NSTimeInterval)timeout callback:(void (^)(BOOL didTimeout))callback;
/**
 * Creates one running FlutterEngine from another, sharing components between them.
 *
 * This results in a faster creation time and a smaller memory footprint engine.
 * This should only be called on a FlutterEngine that is running.
 */
- (FlutterEngine*)spawnWithEntrypoint:(nullable NSString*)entrypoint
                           libraryURI:(nullable NSString*)libraryURI
                         initialRoute:(nullable NSString*)initialRoute
                       entrypointArgs:(nullable NSArray<NSString*>*)entrypointArgs;
/**
 * Dispatches the given key event data to the framework through the engine.
 * The callback is called once the response from the framework is received.
 */
- (void)sendKeyEvent:(const FlutterKeyEvent&)event
            callback:(nullable FlutterKeyEventCallback)callback
            userData:(nullable void*)userData;
// The Dart project this engine was created with.
@property(nonatomic, readonly) FlutterDartProject* project;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_INTERNAL_H_
| engine/shell/platform/darwin/ios/framework/Source/FlutterEngine_Internal.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterEngine_Internal.h",
"repo_id": "engine",
"token_count": 1417
} | 356 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERPLATFORMPLUGIN_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERPLATFORMPLUGIN_H_
#include "flutter/fml/platform/darwin/weak_nsobject.h"
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterChannels.h"
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h"
@interface FlutterPlatformPlugin : NSObject
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
// Creates the plugin holding a weak reference to the owning engine.
- (instancetype)initWithEngine:(fml::WeakNSObject<FlutterEngine>)engine NS_DESIGNATED_INITIALIZER;
// Handles a method call arriving from the framework's platform channel.
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result;
@end
namespace flutter {
// Notification names and user-info keys used to broadcast orientation and
// overlay style updates.
extern const char* const kOrientationUpdateNotificationName;
extern const char* const kOrientationUpdateNotificationKey;
extern const char* const kOverlayStyleUpdateNotificationName;
extern const char* const kOverlayStyleUpdateNotificationKey;
}  // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERPLATFORMPLUGIN_H_
| engine/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.h",
"repo_id": "engine",
"token_count": 433
} | 357 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.h"
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "flutter/fml/logging.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterViewController_Internal.h"
// Method Channel name to start spell check.
static NSString* const kInitiateSpellCheck = @"SpellCheck.initiateSpellCheck";
@interface FlutterSpellCheckPlugin ()
// The text checker backing spell check queries. Created lazily in
// -handleMethodCall:result: because it is expensive to initialize.
@property(nonatomic, retain) UITextChecker* textChecker;
@end
@implementation FlutterSpellCheckPlugin
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
  if (!_textChecker) {
    // UITextChecker is an expensive object to initiate, see:
    // https://github.com/flutter/flutter/issues/104454. Lazily initialize the
    // UITextChecker object at the first method channel call. We avoid using a
    // lazy getter for testing.
    _textChecker = [[UITextChecker alloc] init];
  }
  NSString* method = call.method;
  NSArray* args = call.arguments;
  if ([method isEqualToString:kInitiateSpellCheck]) {
    FML_DCHECK(args.count == 2);
    id language = args[0];
    id text = args[1];
    if (language == [NSNull null] || text == [NSNull null]) {
      // Bail if null arguments are passed from dart.
      result(nil);
      return;
    }
    NSArray<NSDictionary<NSString*, id>*>* spellCheckResult =
        [self findAllSpellCheckSuggestionsForText:text inLanguage:language];
    result(spellCheckResult);
  }
}
// Get all the misspelled words and suggestions in the entire String.
//
// The result will be formatted as an NSArray.
// Each item of the array is a dictionary representing a misspelled word and suggestions.
// The format looks like:
// {
//  startIndex: 0,
//  endIndex: 5,
//  suggestions: [hello, ...]
// }
//
// Returns nil if the language is invalid.
// Returns an empty array if there are no spell check suggestions.
- (NSArray<NSDictionary<NSString*, id>*>*)findAllSpellCheckSuggestionsForText:(NSString*)text
                                                                   inLanguage:(NSString*)language {
  // Transform Dart Locale format to iOS language format if necessary
  // (e.g. "en-us" -> "en_US").
  if ([language containsString:@"-"]) {
    NSArray<NSString*>* languageCodes = [language componentsSeparatedByString:@"-"];
    FML_DCHECK(languageCodes.count == 2);
    NSString* lastCode = [[languageCodes lastObject] uppercaseString];
    language = [NSString stringWithFormat:@"%@_%@", [languageCodes firstObject], lastCode];
  }
  if (![UITextChecker.availableLanguages containsObject:language]) {
    return nil;
  }
  NSMutableArray<FlutterSpellCheckResult*>* allSpellSuggestions = [[NSMutableArray alloc] init];
  FlutterSpellCheckResult* nextSpellSuggestion;
  NSUInteger nextOffset = 0;
  // Repeatedly scan forward from the end of the previously found misspelled
  // word until no further misspellings are found or the text is exhausted.
  do {
    nextSpellSuggestion = [self findSpellCheckSuggestionsForText:text
                                                      inLanguage:language
                                                  startingOffset:nextOffset];
    if (nextSpellSuggestion != nil) {
      [allSpellSuggestions addObject:nextSpellSuggestion];
      nextOffset =
          nextSpellSuggestion.misspelledRange.location + nextSpellSuggestion.misspelledRange.length;
    }
  } while (nextSpellSuggestion != nil && nextOffset < text.length);
  NSMutableArray* methodChannelResult = [[[NSMutableArray alloc] init] autorelease];
  for (FlutterSpellCheckResult* result in allSpellSuggestions) {
    [methodChannelResult addObject:[result toDictionary]];
  }
  [allSpellSuggestions release];
  return methodChannelResult;
}
// Get the misspelled word and suggestions.
//
// Returns nil if no spell check suggestions.
- (FlutterSpellCheckResult*)findSpellCheckSuggestionsForText:(NSString*)text
                                                  inLanguage:(NSString*)language
                                              startingOffset:(NSInteger)startingOffset {
  FML_DCHECK([UITextChecker.availableLanguages containsObject:language]);
  NSRange misspelledRange =
      [self.textChecker rangeOfMisspelledWordInString:text
                                                range:NSMakeRange(0, text.length)
                                           startingAt:startingOffset
                                                 wrap:NO
                                             language:language];
  if (misspelledRange.location == NSNotFound) {
    // No misspelled word found
    return nil;
  }
  // If no possible guesses, the API returns an empty array:
  // https://developer.apple.com/documentation/uikit/uitextchecker/1621037-guessesforwordrange?language=objc
  NSArray<NSString*>* suggestions = [self.textChecker guessesForWordRange:misspelledRange
                                                                 inString:text
                                                                 language:language];
  FlutterSpellCheckResult* result =
      [[[FlutterSpellCheckResult alloc] initWithMisspelledRange:misspelledRange
                                                    suggestions:suggestions] autorelease];
  return result;
}
- (UITextChecker*)textChecker {
  return _textChecker;
}
- (void)dealloc {
  [_textChecker release];
  [super dealloc];
}
@end
@implementation FlutterSpellCheckResult
- (instancetype)initWithMisspelledRange:(NSRange)range
                            suggestions:(NSArray<NSString*>*)suggestions {
  self = [super init];
  if (self) {
    // Copy so this result owns an immutable snapshot of the suggestion list.
    _suggestions = [suggestions copy];
    _misspelledRange = range;
  }
  return self;
}
// Converts this result into the dictionary shape expected by the method
// channel (keys: startIndex, endIndex, suggestions).
- (NSDictionary<NSString*, NSObject*>*)toDictionary {
  NSMutableDictionary* result = [[[NSMutableDictionary alloc] initWithCapacity:3] autorelease];
  result[@"startIndex"] = @(_misspelledRange.location);
  // The end index represents the next index after the last character of a misspelled word to match
  // the behavior of Dart's TextRange: https://api.flutter.dev/flutter/dart-ui/TextRange/end.html
  result[@"endIndex"] = @(_misspelledRange.location + _misspelledRange.length);
  result[@"suggestions"] = _suggestions;
  return result;
}
- (void)dealloc {
  [_suggestions release];
  [super dealloc];
}
@end
| engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.mm",
"repo_id": "engine",
"token_count": 2440
} | 358 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPlugin.h"
#import <OCMock/OCMock.h>
#import <XCTest/XCTest.h>
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterMacros.h"
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h"
#import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterViewController.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputPlugin.h"
FLUTTER_ASSERT_ARC
// Exposes private FlutterEngine plugin accessors so tests can stub them.
@interface FlutterEngine ()
- (nonnull FlutterUndoManagerPlugin*)undoManagerPlugin;
- (nonnull FlutterTextInputPlugin*)textInputPlugin;
@end
@interface FlutterUndoManagerPluginForTest : FlutterUndoManagerPlugin
// Redeclared writable (assign) so the test can inject a mock undo manager.
@property(nonatomic, assign) NSUndoManager* undoManager;
@end
// No behavior overrides; this subclass exists only to synthesize the
// injectable undoManager property declared above.
@implementation FlutterUndoManagerPluginForTest {
}
@end
// Test fixture state: a mocked engine, the plugin under test, a real view
// controller, and a mocked undo manager injected into the plugin.
@interface FlutterUndoManagerPluginTest : XCTestCase
@property(nonatomic, strong) id engine;
@property(nonatomic, strong) FlutterUndoManagerPluginForTest* undoManagerPlugin;
@property(nonatomic, strong) FlutterViewController* viewController;
@property(nonatomic, strong) NSUndoManager* undoManager;
@end
@implementation FlutterUndoManagerPluginTest {
}
- (void)setUp {
[super setUp];
self.engine = OCMClassMock([FlutterEngine class]);
self.undoManagerPlugin = [[FlutterUndoManagerPluginForTest alloc] initWithDelegate:self.engine];
self.viewController = [[FlutterViewController alloc] init];
self.undoManagerPlugin.viewController = self.viewController;
self.undoManager = OCMClassMock([NSUndoManager class]);
self.undoManagerPlugin.undoManager = self.undoManager;
}
- (void)tearDown {
[self.undoManager removeAllActionsWithTarget:self.undoManagerPlugin];
self.engine = nil;
self.viewController = nil;
self.undoManager = nil;
[super tearDown];
}
- (void)testSetUndoState {
__block int registerUndoCount = 0;
__block void (^undoHandler)(id target);
OCMStub([self.undoManager registerUndoWithTarget:self.undoManagerPlugin handler:[OCMArg any]])
.andDo(^(NSInvocation* invocation) {
registerUndoCount++;
__weak void (^handler)(id target);
[invocation retainArguments];
[invocation getArgument:&handler atIndex:3];
undoHandler = handler;
});
__block int removeAllActionsCount = 0;
OCMStub([self.undoManager removeAllActionsWithTarget:self.undoManagerPlugin])
.andDo(^(NSInvocation* invocation) {
removeAllActionsCount++;
});
__block int delegateUndoCount = 0;
OCMStub([self.engine flutterUndoManagerPlugin:[OCMArg any]
handleUndoWithDirection:FlutterUndoRedoDirectionUndo])
.andDo(^(NSInvocation* invocation) {
delegateUndoCount++;
});
__block int delegateRedoCount = 0;
OCMStub([self.engine flutterUndoManagerPlugin:[OCMArg any]
handleUndoWithDirection:FlutterUndoRedoDirectionRedo])
.andDo(^(NSInvocation* invocation) {
delegateRedoCount++;
});
__block int undoCount = 0;
OCMStub([self.undoManager undo]).andDo(^(NSInvocation* invocation) {
undoCount++;
undoHandler(self.undoManagerPlugin);
});
// If canUndo and canRedo are false, only removeAllActionsWithTarget is called.
FlutterMethodCall* setUndoStateCall =
[FlutterMethodCall methodCallWithMethodName:@"UndoManager.setUndoState"
arguments:@{@"canUndo" : @NO, @"canRedo" : @NO}];
[self.undoManagerPlugin handleMethodCall:setUndoStateCall
result:^(id _Nullable result){
}];
XCTAssertEqual(1, removeAllActionsCount);
XCTAssertEqual(0, registerUndoCount);
// If canUndo is true, an undo will be registered.
setUndoStateCall =
[FlutterMethodCall methodCallWithMethodName:@"UndoManager.setUndoState"
arguments:@{@"canUndo" : @YES, @"canRedo" : @NO}];
[self.undoManagerPlugin handleMethodCall:setUndoStateCall
result:^(id _Nullable result){
}];
XCTAssertEqual(2, removeAllActionsCount);
XCTAssertEqual(1, registerUndoCount);
// Invoking the undo handler will invoke the handleUndo delegate method with "undo".
undoHandler(self.undoManagerPlugin);
XCTAssertEqual(1, delegateUndoCount);
XCTAssertEqual(0, delegateRedoCount);
XCTAssertEqual(2, registerUndoCount);
// Invoking the redo handler will invoke the handleUndo delegate method with "redo".
undoHandler(self.undoManagerPlugin);
XCTAssertEqual(1, delegateUndoCount);
XCTAssertEqual(1, delegateRedoCount);
XCTAssertEqual(3, registerUndoCount);
// If canRedo is true, an undo will be registered and undo will be called.
setUndoStateCall =
[FlutterMethodCall methodCallWithMethodName:@"UndoManager.setUndoState"
arguments:@{@"canUndo" : @NO, @"canRedo" : @YES}];
[self.undoManagerPlugin handleMethodCall:setUndoStateCall
result:^(id _Nullable result){
}];
XCTAssertEqual(3, removeAllActionsCount);
XCTAssertEqual(5, registerUndoCount);
XCTAssertEqual(1, undoCount);
// Invoking the redo handler will invoke the handleUndo delegate method with "redo".
undoHandler(self.undoManagerPlugin);
XCTAssertEqual(1, delegateUndoCount);
XCTAssertEqual(2, delegateRedoCount);
}
// NOTE(review): despite the "DoesInteract" name, this test asserts that
// setUndoState does NOT touch the text input delegate (see the
// OCMVerify(never(), ...) below). Consider renaming to
// testSetUndoStateDoesNotInteractWithInputDelegate after confirming intent.
- (void)testSetUndoStateDoesInteractWithInputDelegate {
  // Regression test for https://github.com/flutter/flutter/issues/133424
  FlutterViewController* viewController = OCMPartialMock(self.viewController);
  self.undoManagerPlugin.viewController = self.viewController;
  // Stub the engine's text-input chain so any delegate access performed by the
  // undo manager plugin would be observable on the mock text input view.
  FlutterTextInputPlugin* textInputPlugin = OCMClassMock([FlutterTextInputPlugin class]);
  FlutterTextInputView* textInputView = OCMClassMock([FlutterTextInputView class]);
  OCMStub([viewController engine]).andReturn(self.engine);
  OCMStub([self.engine textInputPlugin]).andReturn(textInputPlugin);
  OCMStub([textInputPlugin textInputView]).andReturn(textInputView);
  FlutterMethodCall* setUndoStateCall =
      [FlutterMethodCall methodCallWithMethodName:@"UndoManager.setUndoState"
                                        arguments:@{@"canUndo" : @NO, @"canRedo" : @NO}];
  [self.undoManagerPlugin handleMethodCall:setUndoStateCall
                                    result:^(id _Nullable result){
                                    }];
  // Setting the undo state must never query the input delegate.
  OCMVerify(never(), [textInputView inputDelegate]);
}
@end
| engine/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPluginTest.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterUndoManagerPluginTest.mm",
"repo_id": "engine",
"token_count": 2589
} | 359 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.h"
#include "flutter/fml/logging.h"
@implementation UIViewController (FlutterScreenAndSceneIfLoaded)

/// Returns the UIWindowScene of the view's window, or nil (logging a warning)
/// if the view has not been loaded yet.
- (UIWindowScene*)flutterWindowSceneIfViewLoaded {
  UIView* loadedView = self.viewIfLoaded;
  if (!loadedView) {
    FML_LOG(WARNING) << "Trying to access the window scene before the view is loaded.";
    return nil;
  }
  return loadedView.window.windowScene;
}

/// Returns the UIScreen presenting the view: the window scene's screen on
/// iOS 13 and later, or the main screen on earlier versions. Returns nil
/// (logging a warning) when the view has not been loaded yet.
- (UIScreen*)flutterScreenIfViewLoaded {
  if (@available(iOS 13.0, *)) {
    if (!self.viewIfLoaded) {
      FML_LOG(WARNING) << "Trying to access the screen before the view is loaded.";
      return nil;
    }
    return self.flutterWindowSceneIfViewLoaded.screen;
  }
  return UIScreen.mainScreen;
}

@end
| engine/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.mm",
"repo_id": "engine",
"token_count": 320
} | 360 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/ios/framework/Source/profiler_metrics_ios.h"
#import <Foundation/Foundation.h>
#import "flutter/shell/platform/darwin/ios/framework/Source/IOKit.h"
namespace {

// RAII holder for the `thread_array_t` returned by task_threads(), so that
// early returns in `ProfilerMetricsIOS::CpuUsage` don't leak it.
class MachThreads {
 public:
  thread_array_t threads = NULL;
  mach_msg_type_number_t thread_count = 0;

  MachThreads() = default;

  // Deallocates the thread list obtained from task_threads(). If
  // task_threads() was never called (or failed), `threads` is NULL and
  // `thread_count` is 0, making this a no-op deallocation that still returns
  // KERN_SUCCESS.
  ~MachThreads() {
    kern_return_t kernel_return_code = vm_deallocate(
        mach_task_self(), reinterpret_cast<vm_offset_t>(threads), thread_count * sizeof(thread_t));
    FML_CHECK(kernel_return_code == KERN_SUCCESS) << "Failed to deallocate thread infos.";
  }

 private:
  FML_DISALLOW_COPY_AND_ASSIGN(MachThreads);
};

}  // namespace
namespace flutter {
namespace {
#if FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG || \
FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_PROFILE
// Returns the "empty" sentinel value used by Scoped<T> for a handle type T.
template <typename T>
T ClearValue() {
  return nullptr;
}

// io_object_t is an integral handle type, so its empty value is 0 rather than
// nullptr.
template <>
io_object_t ClearValue<io_object_t>() {
  return 0;
}
template <typename T>
/// Generic RAII wrapper like unique_ptr but gives access to its handle.
class Scoped {
 public:
  typedef void (*Deleter)(T);

  // Constructs an empty wrapper that will destroy a later-assigned object
  // with `deleter`.
  explicit Scoped(Deleter deleter) : object_(ClearValue<T>()), deleter_(deleter) {}

  // Takes ownership of `object`, destroying it with `deleter` on scope exit.
  Scoped(T object, Deleter deleter) : object_(object), deleter_(deleter) {}

  ~Scoped() {
    if (object_) {
      deleter_(object_);
    }
  }

  // Releases any currently held object, then returns a pointer to the (now
  // cleared) storage. Intended for out-parameter style APIs that fill in the
  // handle, e.g. IOServiceGetMatchingServices(..., scoped.handle()).
  T* handle() {
    if (object_) {
      deleter_(object_);
      object_ = ClearValue<T>();
    }
    return &object_;
  }

  // Returns the held object without transferring ownership.
  T get() { return object_; }

  // Destroys the current object (if any) and takes ownership of `new_value`.
  void reset(T new_value) {
    if (object_) {
      deleter_(object_);
    }
    object_ = new_value;
  }

 private:
  FML_DISALLOW_COPY_ASSIGN_AND_MOVE(Scoped);
  T object_;
  Deleter deleter_;
};
// Scoped<> deleter for CoreFoundation dictionaries: balances the create/copy
// that produced `value`.
void DeleteCF(CFMutableDictionaryRef value) {
  CFRelease(value);
}
// Scoped<> deleter for IOKit object/iterator handles.
void DeleteIO(io_object_t value) {
  IOObjectRelease(value);
}
// Walks the registry entries produced by `iterator` and returns the GPU
// utilization of the first entry exposing a
// "PerformanceStatistics" -> "Device Utilization %" property, or std::nullopt
// when no entry does.
std::optional<GpuUsageInfo> FindGpuUsageInfo(io_iterator_t iterator) {
  for (Scoped<io_registry_entry_t> regEntry(IOIteratorNext(iterator), DeleteIO); regEntry.get();
       regEntry.reset(IOIteratorNext(iterator))) {
    Scoped<CFMutableDictionaryRef> serviceDictionary(DeleteCF);
    if (IORegistryEntryCreateCFProperties(regEntry.get(), serviceDictionary.handle(),
                                          kCFAllocatorDefault, kNilOptions) != kIOReturnSuccess) {
      // Entry has no readable properties; try the next one.
      continue;
    }
    NSDictionary* dictionary =
        ((__bridge NSDictionary*)serviceDictionary.get())[@"PerformanceStatistics"];
    NSNumber* utilization = dictionary[@"Device Utilization %"];
    if (utilization) {
      return (GpuUsageInfo){.percent_usage = [utilization doubleValue]};
    }
  }
  return std::nullopt;
}
// Looks up GPU usage when running in the simulator, whose GPU service is
// registered under the "IntelAccelerator" name on the host.
[[maybe_unused]] std::optional<GpuUsageInfo> FindSimulatorGpuUsageInfo() {
  Scoped<io_iterator_t> iterator(DeleteIO);
  if (IOServiceGetMatchingServices(kIOMasterPortDefault, IOServiceNameMatching("IntelAccelerator"),
                                   iterator.handle()) == kIOReturnSuccess) {
    return FindGpuUsageInfo(iterator.get());
  }
  return std::nullopt;
}
// Looks up GPU usage on a physical device: iterates the children of every
// "sgx" GPU service entry, returning the first child subtree that reports
// performance statistics.
[[maybe_unused]] std::optional<GpuUsageInfo> FindDeviceGpuUsageInfo() {
  Scoped<io_iterator_t> iterator(DeleteIO);
  if (IOServiceGetMatchingServices(kIOMasterPortDefault, IOServiceNameMatching("sgx"),
                                   iterator.handle()) == kIOReturnSuccess) {
    for (Scoped<io_registry_entry_t> regEntry(IOIteratorNext(iterator.get()), DeleteIO);
         regEntry.get(); regEntry.reset(IOIteratorNext(iterator.get()))) {
      Scoped<io_iterator_t> innerIterator(DeleteIO);
      if (IORegistryEntryGetChildIterator(regEntry.get(), kIOServicePlane,
                                          innerIterator.handle()) == kIOReturnSuccess) {
        std::optional<GpuUsageInfo> result = FindGpuUsageInfo(innerIterator.get());
        if (result.has_value()) {
          return result;
        }
      }
    }
  }
  return std::nullopt;
}
#endif // FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG ||
// FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_PROFILE
// Samples current GPU utilization, selecting the lookup strategy at compile
// time: disabled (nullopt) in release modes, "IntelAccelerator" on the
// simulator, and "sgx" on physical iOS devices.
std::optional<GpuUsageInfo> PollGpuUsage() {
#if (FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_RELEASE || \
     FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_JIT_RELEASE)
  return std::nullopt;
#elif TARGET_IPHONE_SIMULATOR
  return FindSimulatorGpuUsageInfo();
#elif TARGET_OS_IOS
  return FindDeviceGpuUsageInfo();
#endif  // TARGET_IPHONE_SIMULATOR
}
} // namespace
// Collects one profiling sample combining CPU, memory, and GPU usage.
ProfileSample ProfilerMetricsIOS::GenerateSample() {
  return {.cpu_usage = CpuUsage(), .memory_usage = MemoryUsage(), .gpu_usage = PollGpuUsage()};
}
// Returns the aggregate CPU usage (as a percentage, summed over all live
// threads of this task) together with the surviving thread count, or
// std::nullopt if the Mach calls fail.
std::optional<CpuUsageInfo> ProfilerMetricsIOS::CpuUsage() {
  kern_return_t kernel_return_code;
  // RAII wrapper deallocates the thread array on every exit path below.
  MachThreads mach_threads = MachThreads();

  // Get threads in the task
  kernel_return_code =
      task_threads(mach_task_self(), &mach_threads.threads, &mach_threads.thread_count);
  if (kernel_return_code != KERN_SUCCESS) {
    FML_LOG(ERROR) << "Error retrieving task information: "
                   << mach_error_string(kernel_return_code);
    return std::nullopt;
  }

  double total_cpu_usage = 0.0;
  uint32_t num_threads = mach_threads.thread_count;

  // Add the CPU usage for each thread. It should be noted that there may be some CPU usage missing
  // from this calculation. If a thread ends between calls to this routine, then its info will be
  // lost. We could solve this by installing a callback using pthread_key_create. The callback would
  // report the thread is ending and allow the code to get the CPU usage. But we need to call
  // pthread_setspecific in each thread to set the key's value to a non-null value for the callback
  // to work. If we really need this information and if we have a good mechanism for calling
  // pthread_setspecific in every thread, then we can include that value in the CPU usage.
  for (mach_msg_type_number_t i = 0; i < mach_threads.thread_count; i++) {
    thread_basic_info_data_t basic_thread_info;
    mach_msg_type_number_t thread_info_count = THREAD_BASIC_INFO_COUNT;
    kernel_return_code =
        thread_info(mach_threads.threads[i], THREAD_BASIC_INFO,
                    reinterpret_cast<thread_info_t>(&basic_thread_info), &thread_info_count);
    switch (kernel_return_code) {
      case KERN_SUCCESS: {
        // cpu_usage is scaled by TH_USAGE_SCALE; normalize to a 0..1 fraction.
        const double current_thread_cpu_usage =
            basic_thread_info.cpu_usage / static_cast<float>(TH_USAGE_SCALE);
        total_cpu_usage += current_thread_cpu_usage;
        break;
      }
      case MACH_SEND_TIMEOUT:
      case MACH_SEND_TIMED_OUT:
      case MACH_SEND_INVALID_DEST:
        // Ignore as this thread has been destroyed. The possible return codes are not really well
        // documented. This handling is inspired from the following sources:
        // - https://opensource.apple.com/source/xnu/xnu-4903.221.2/tests/task_inspect.c.auto.html
        // - https://github.com/apple/swift-corelibs-libdispatch/blob/main/src/queue.c#L6617
        num_threads--;
        break;
      default:
        FML_LOG(ERROR) << "Error retrieving thread information: "
                       << mach_error_string(kernel_return_code);
        return std::nullopt;
    }
  }

  // Report as a percentage (0..100 * number of busy cores).
  flutter::CpuUsageInfo cpu_usage_info = {.num_threads = num_threads,
                                          .total_cpu_usage = total_cpu_usage * 100.0};
  return cpu_usage_info;
}
// Returns the task's dirty (physical footprint) and owned shared memory usage
// in MB, or std::nullopt if the Mach call fails.
std::optional<MemoryUsageInfo> ProfilerMetricsIOS::MemoryUsage() {
  kern_return_t kernel_return_code;
  task_vm_info_data_t task_memory_info;
  mach_msg_type_number_t task_memory_info_count = TASK_VM_INFO_COUNT;
  kernel_return_code =
      task_info(mach_task_self(), TASK_VM_INFO, reinterpret_cast<task_info_t>(&task_memory_info),
                &task_memory_info_count);
  if (kernel_return_code != KERN_SUCCESS) {
    FML_LOG(ERROR) << " Error retrieving task memory information: "
                   << mach_error_string(kernel_return_code);
    return std::nullopt;
  }

  // `phys_footprint` is Apple's recommended way to measure an app's memory
  // usage and is the best approximation of the Xcode memory gauge. According
  // to its source code explanation, the physical footprint mainly consists of
  // the app's internal memory data and IOKit mappings. `resident_size` is the
  // total physical memory used by the app, so we simply do
  // `resident_size - phys_footprint` to obtain the shared memory usage.
  const double dirty_memory_usage =
      static_cast<double>(task_memory_info.phys_footprint) / 1024.0 / 1024.0;
  const double owned_shared_memory_usage =
      static_cast<double>(task_memory_info.resident_size) / 1024.0 / 1024.0 - dirty_memory_usage;
  flutter::MemoryUsageInfo memory_usage_info = {
      .dirty_memory_usage = dirty_memory_usage,
      .owned_shared_memory_usage = owned_shared_memory_usage};
  return memory_usage_info;
}
} // namespace flutter
| engine/shell/platform/darwin/ios/framework/Source/profiler_metrics_ios.mm/0 | {
"file_path": "engine/shell/platform/darwin/ios/framework/Source/profiler_metrics_ios.mm",
"repo_id": "engine",
"token_count": 3492
} | 361 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_IOS_SURFACE_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_IOS_SURFACE_H_
#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h"
#include <memory>
#include "flutter/flow/embedded_views.h"
#include "flutter/flow/surface.h"
#include "flutter/fml/macros.h"
#include "flutter/fml/platform/darwin/scoped_nsobject.h"
@class CALayer;
namespace flutter {

// Returns true if the app explicitly specified to use the iOS view embedding
// mechanism which is still in a release preview.
bool IsIosEmbeddedViewsPreviewEnabled();

// Abstraction over the backend-specific rendering surface that backs a CALayer
// on iOS. Concrete subclasses are chosen by Create() based on the IOSContext.
class IOSSurface {
 public:
  // Creates the IOSSurface implementation appropriate for |context|, rendering
  // into |layer|.
  static std::unique_ptr<IOSSurface> Create(std::shared_ptr<IOSContext> context,
                                            const fml::scoped_nsobject<CALayer>& layer);

  // The rendering context this surface was created with.
  std::shared_ptr<IOSContext> GetContext() const;

  virtual ~IOSSurface();

  // Whether the underlying platform surface is currently usable for rendering.
  virtual bool IsValid() const = 0;

  // Resizes the backing store to match the current layer size, if necessary.
  virtual void UpdateStorageSizeIfNecessary() = 0;

  // Creates a GPU surface. If no GrDirectContext is supplied and the rendering mode
  // supports one, a new one will be created; otherwise, the software backend
  // will be used.
  //
  // If a GrDirectContext is supplied, creates a secondary surface.
  virtual std::unique_ptr<Surface> CreateGPUSurface(GrDirectContext* gr_context = nullptr) = 0;

 protected:
  explicit IOSSurface(std::shared_ptr<IOSContext> ios_context);

 private:
  std::shared_ptr<IOSContext> ios_context_;

  FML_DISALLOW_COPY_AND_ASSIGN(IOSSurface);
};

}  // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_IOS_SURFACE_H_
| engine/shell/platform/darwin/ios/ios_surface.h/0 | {
"file_path": "engine/shell/platform/darwin/ios/ios_surface.h",
"repo_id": "engine",
"token_count": 627
} | 362 |
# Flutter macOS Embedder
This directory contains files specific to the Flutter macOS embedder. The
embedder is built as a macOS framework that is linked into the target
application. The framework is composed both of macOS-specific code and code
shared with the iOS embedder. These can be found in:
```
flutter/shell/platform/darwin/common/framework
flutter/shell/platform/darwin/macos/framework
```
Additionally, the framework relies on utility code shared across all embedders,
which is found in:
```
flutter/shell/platform/common
```
To learn more, see the [Engine architecture wiki][wiki_arch].
## Building
Building all artifacts required for macOS occurs in two steps:
* Host binary build: Builds tooling used by the Flutter tool to build Flutter
applications targeting macOS.
* macOS Target build: Builds the framework that implements the macOS Flutter
embedder and exposes public API such as `FlutterViewController` used by
Flutter applications.
Once you've [prepared your environment for engine development][wiki_engine_env],
you can build the macOS embedder from the `src/flutter` directory using the
following commands:
```sh
# Perform the host build.
./tools/gn --unopt --no-goma
autoninja -C ../out/host_debug_unopt
# Perform the macOS target build.
./tools/gn --unopt --mac --no-goma
autoninja -C ../out/mac_debug_unopt
```
Builds are architecture-specific, and can be controlled by specifying
`--mac-cpu=arm64` or `--mac-cpu=x64` (default) when invoking `gn`.
Googlers can remove `--no-goma` to make use of the Goma distributed compile
service.
## Testing
The macOS-specific embedder tests are built as the
`flutter_desktop_darwin_unittests` binary. Like all gtest-based test binaries, a
subset of tests can be run by applying a filter such as
`--gtest_filter='FlutterViewControllerTest.*Key*'`.
More general details on testing can be found on the [Wiki][wiki_engine_testing].
[wiki_arch]: https://github.com/flutter/flutter/wiki/The-Engine-architecture
[wiki_engine_env]: https://github.com/flutter/flutter/wiki/Setting-up-the-Engine-development-environment
[wiki_engine_testing]: https://github.com/flutter/flutter/wiki/Testing-the-engine
| engine/shell/platform/darwin/macos/README.md/0 | {
"file_path": "engine/shell/platform/darwin/macos/README.md",
"repo_id": "engine",
"token_count": 628
} | 363 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "flutter/shell/platform/darwin/macos/framework/Headers/FlutterAppLifecycleDelegate.h"
#import "flutter/shell/platform/darwin/macos/framework/Source/FlutterAppLifecycleDelegate_Internal.h"
#include <AppKit/AppKit.h>
#include <AppKit/NSApplication.h>
#include <Foundation/Foundation.h>
#include <objc/message.h>
#include "flutter/fml/logging.h"
#include "flutter/fml/paths.h"
@implementation FlutterAppLifecycleRegistrar {
  // Blocks that, when run, remove the NSNotificationCenter registrations made
  // in -addObserverFor:selector:. Invoked only from -dealloc.
  NSMutableArray* _notificationUnsubscribers;
}

// Registers |selector| for notifications named |name| on the default
// notification center, and queues an unsubscribe block for -dealloc.
- (void)addObserverFor:(NSString*)name selector:(SEL)selector {
  [[NSNotificationCenter defaultCenter] addObserver:self selector:selector name:name object:nil];
  // Deliberately unretained: copying a block that strongly captures self into
  // the _notificationUnsubscribers ivar would create a retain cycle
  // (self -> array -> block -> self) and prevent -dealloc from ever running,
  // leaking the observer registrations. The raw pointer is still valid when
  // the blocks run, because they are only invoked from -dealloc.
  __unsafe_unretained NSObject* blockSelf = self;
  dispatch_block_t unsubscribe = ^{
    [[NSNotificationCenter defaultCenter] removeObserver:blockSelf name:name object:nil];
  };
  [_notificationUnsubscribers addObject:[unsubscribe copy]];
}

- (instancetype)init {
  if (self = [super init]) {
    _notificationUnsubscribers = [[NSMutableArray alloc] init];

    // Using a macro to avoid errors where the notification doesn't match the
    // selector.
#ifdef OBSERVE_NOTIFICATION
#error OBSERVE_NOTIFICATION ALREADY DEFINED!
#else
#define OBSERVE_NOTIFICATION(SELECTOR) \
  [self addObserverFor:NSApplication##SELECTOR##Notification selector:@selector(handle##SELECTOR:)]
#endif

    OBSERVE_NOTIFICATION(WillFinishLaunching);
    OBSERVE_NOTIFICATION(DidFinishLaunching);
    OBSERVE_NOTIFICATION(WillBecomeActive);
    OBSERVE_NOTIFICATION(DidBecomeActive);
    OBSERVE_NOTIFICATION(WillResignActive);
    OBSERVE_NOTIFICATION(DidResignActive);
    OBSERVE_NOTIFICATION(WillTerminate);
    OBSERVE_NOTIFICATION(WillHide);
    OBSERVE_NOTIFICATION(DidHide);
    OBSERVE_NOTIFICATION(WillUnhide);
    OBSERVE_NOTIFICATION(DidUnhide);
    OBSERVE_NOTIFICATION(DidChangeScreenParameters);
    OBSERVE_NOTIFICATION(DidChangeOcclusionState);

#undef OBSERVE_NOTIFICATION

    // Delegates are held weakly, so registration does not extend their
    // lifetimes.
    _delegates = [NSPointerArray weakObjectsPointerArray];
  }
  return self;
}

- (void)dealloc {
  // Tear down every notification registration made in init.
  for (dispatch_block_t unsubscribe in _notificationUnsubscribers) {
    unsubscribe();
  }
  [_notificationUnsubscribers removeAllObjects];
  _delegates = nil;
  _notificationUnsubscribers = nil;
}

// Whether |x| is a (nonzero) power of two.
static BOOL IsPowerOfTwo(NSUInteger x) {
  return x != 0 && (x & (x - 1)) == 0;
}

- (void)addDelegate:(NSObject<FlutterAppLifecycleDelegate>*)delegate {
  [_delegates addPointer:(__bridge void*)delegate];
  // Deallocated delegates leave NULL entries behind in the weak pointer array;
  // compacting at power-of-two sizes prunes them with amortized constant cost.
  if (IsPowerOfTwo([_delegates count])) {
    [_delegates compact];
  }
}

- (void)removeDelegate:(NSObject<FlutterAppLifecycleDelegate>*)delegate {
  NSUInteger index = [[_delegates allObjects] indexOfObject:delegate];
  if (index != NSNotFound) {
    [_delegates removePointerAtIndex:index];
  }
}

// This isn't done via performSelector because that can cause leaks due to the
// selector not being known. Using a macro to avoid mismatch errors between the
// notification and the selector.
#ifdef DISTRIBUTE_NOTIFICATION
#error DISTRIBUTE_NOTIFICATION ALREADY DEFINED!
#else
#define DISTRIBUTE_NOTIFICATION(SELECTOR)                                    \
  -(void)handle##SELECTOR : (NSNotification*)notification {                  \
    for (NSObject<FlutterAppLifecycleDelegate> * delegate in _delegates) {   \
      if ([delegate respondsToSelector:@selector(handle##SELECTOR:)]) {      \
        [delegate handle##SELECTOR:notification];                            \
      }                                                                      \
    }                                                                        \
  }
#endif

DISTRIBUTE_NOTIFICATION(WillFinishLaunching)
DISTRIBUTE_NOTIFICATION(DidFinishLaunching)
DISTRIBUTE_NOTIFICATION(WillBecomeActive)
DISTRIBUTE_NOTIFICATION(DidBecomeActive)
DISTRIBUTE_NOTIFICATION(WillResignActive)
DISTRIBUTE_NOTIFICATION(DidResignActive)
DISTRIBUTE_NOTIFICATION(WillTerminate)
DISTRIBUTE_NOTIFICATION(WillHide)
DISTRIBUTE_NOTIFICATION(WillUnhide)
DISTRIBUTE_NOTIFICATION(DidHide)
DISTRIBUTE_NOTIFICATION(DidUnhide)
DISTRIBUTE_NOTIFICATION(DidChangeScreenParameters)
DISTRIBUTE_NOTIFICATION(DidChangeOcclusionState)

#undef DISTRIBUTE_NOTIFICATION

@end
| engine/shell/platform/darwin/macos/framework/Source/FlutterAppLifecycleDelegate.mm/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterAppLifecycleDelegate.mm",
"repo_id": "engine",
"token_count": 1649
} | 364 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTEREMBEDDERKEYRESPONDER_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTEREMBEDDERKEYRESPONDER_H_
#import <Cocoa/Cocoa.h>
#import "flutter/shell/platform/darwin/macos/framework/Source/FlutterKeyPrimaryResponder.h"
#include "flutter/shell/platform/embedder/embedder.h"
/**
 * Signature of the function used to send a converted key event through the
 * embedder API, with an optional callback (and its user data) to be invoked
 * once the framework has responded to the event.
 */
typedef void (^FlutterSendEmbedderKeyEvent)(const FlutterKeyEvent& /* event */,
                                            _Nullable FlutterKeyEventCallback /* callback */,
                                            void* _Nullable /* user_data */);

/**
 * A primary responder of |FlutterKeyboardManager| that handles events by
 * sending the converted events through the embedder API.
 *
 * This class communicates with the HardwareKeyboard API in the framework.
 */
@interface FlutterEmbedderKeyResponder : NSObject <FlutterKeyPrimaryResponder>

/**
 * Create an instance by specifying the function to send converted events to.
 *
 * The |sendEvent| is typically |FlutterEngine|'s |sendKeyEvent|.
 */
- (nonnull instancetype)initWithSendEvent:(_Nonnull FlutterSendEmbedderKeyEvent)sendEvent;

/**
 * Synthesize modifier keys events.
 *
 * If needed, synthesize modifier keys up and down events by comparing their
 * current pressing states with the given modifier flags.
 */
- (void)syncModifiersIfNeeded:(NSEventModifierFlags)modifierFlags
                    timestamp:(NSTimeInterval)timestamp;

/**
 * Returns the keyboard pressed state.
 *
 * The dictionary contains one entry per pressed key, mapping from the logical
 * key to the physical key.
 */
- (nonnull NSDictionary*)getPressedState;

@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTEREMBEDDERKEYRESPONDER_H_
| engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderKeyResponder.h/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderKeyResponder.h",
"repo_id": "engine",
"token_count": 685
} | 365 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "FlutterMenuPlugin.h"
#include <map>
#import "flutter/shell/platform/common/platform_provided_menu.h"
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterChannels.h"
#import "flutter/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h"
// Channel constants
static NSString* const kChannelName = @"flutter/menu";
static NSString* const kIsPluginAvailableMethod = @"Menu.isPluginAvailable";
static NSString* const kMenuSetMenusMethod = @"Menu.setMenus";
static NSString* const kMenuSelectedCallbackMethod = @"Menu.selectedCallback";
static NSString* const kMenuOpenedMethod = @"Menu.opened";
static NSString* const kMenuClosedMethod = @"Menu.closed";

// Serialization keys for menu objects
static NSString* const kIdKey = @"id";
static NSString* const kLabelKey = @"label";
static NSString* const kEnabledKey = @"enabled";
static NSString* const kChildrenKey = @"children";
static NSString* const kDividerKey = @"isDivider";
static NSString* const kShortcutCharacterKey = @"shortcutCharacter";
static NSString* const kShortcutTriggerKey = @"shortcutTrigger";
static NSString* const kShortcutModifiersKey = @"shortcutModifiers";
static NSString* const kPlatformProvidedMenuKey = @"platformProvidedMenu";

// Key shortcut constants
// Bit layout presumably mirrors the framework's serialization of shortcut
// modifiers and logical key ids — verify against the framework side before
// changing.
constexpr int kFlutterShortcutModifierMeta = 1 << 0;
constexpr int kFlutterShortcutModifierShift = 1 << 1;
constexpr int kFlutterShortcutModifierAlt = 1 << 2;
constexpr int kFlutterShortcutModifierControl = 1 << 3;

constexpr uint64_t kFlutterKeyIdPlaneMask = 0xff00000000l;
constexpr uint64_t kFlutterKeyIdUnicodePlane = 0x0000000000l;
constexpr uint64_t kFlutterKeyIdValueMask = 0x00ffffffffl;

// NOTE(review): appears unused in this file, and `{}` zero-initializes the
// pointer to nil rather than creating an empty dictionary — confirm whether
// this declaration can be removed.
static const NSDictionary* logicalKeyToKeyCode = {};

// What to look for in menu titles to replace with the application name.
static NSString* const kAppName = @"APP_NAME";
// Odd facts about AppKit key equivalents:
//
// 1) ⌃⇧1 and ⇧1 cannot exist in the same app, or the former triggers the latter’s
// action.
// 2) ⌃⌥⇧1 and ⇧1 cannot exist in the same app, or the former triggers the latter’s
// action.
// 3) ⌃⌥⇧1 and ⌃⇧1 cannot exist in the same app, or the former triggers the latter’s
// action.
// 4) ⌃⇧a is equivalent to ⌃A: If a keyEquivalent is a capitalized alphabetical
// letter and keyEquivalentModifierMask does not include
// NSEventModifierFlagShift, AppKit will add ⇧ automatically in the UI.
/**
 * Maps framework logical key ids for special (non-character) keys to the
 * key-equivalent character codes NSMenuItem expects.
 *
 * Keys are the logical key ids of matching trigger keys; values are the
 * AppKit character constants (or raw function-key code points) to use as the
 * menu item's keyEquivalent.
 */
static NSDictionary<NSNumber*, NSNumber*>* GetMacOsSpecialKeys() {
  return @{
    @0x00100000008 : [NSNumber numberWithInt:NSBackspaceCharacter],
    @0x00100000009 : [NSNumber numberWithInt:NSTabCharacter],
    @0x0010000000a : [NSNumber numberWithInt:NSNewlineCharacter],
    @0x0010000000c : [NSNumber numberWithInt:NSFormFeedCharacter],
    @0x0010000000d : [NSNumber numberWithInt:NSCarriageReturnCharacter],
    @0x0010000007f : [NSNumber numberWithInt:NSDeleteCharacter],
    @0x00100000801 : [NSNumber numberWithInt:NSF1FunctionKey],
    @0x00100000802 : [NSNumber numberWithInt:NSF2FunctionKey],
    @0x00100000803 : [NSNumber numberWithInt:NSF3FunctionKey],
    @0x00100000804 : [NSNumber numberWithInt:NSF4FunctionKey],
    @0x00100000805 : [NSNumber numberWithInt:NSF5FunctionKey],
    @0x00100000806 : [NSNumber numberWithInt:NSF6FunctionKey],
    @0x00100000807 : [NSNumber numberWithInt:NSF7FunctionKey],
    @0x00100000808 : [NSNumber numberWithInt:NSF8FunctionKey],
    @0x00100000809 : [NSNumber numberWithInt:NSF9FunctionKey],
    @0x0010000080a : [NSNumber numberWithInt:NSF10FunctionKey],
    @0x0010000080b : [NSNumber numberWithInt:NSF11FunctionKey],
    @0x0010000080c : [NSNumber numberWithInt:NSF12FunctionKey],
    @0x0010000080d : [NSNumber numberWithInt:NSF13FunctionKey],
    @0x0010000080e : [NSNumber numberWithInt:NSF14FunctionKey],
    @0x0010000080f : [NSNumber numberWithInt:NSF15FunctionKey],
    @0x00100000810 : [NSNumber numberWithInt:NSF16FunctionKey],
    @0x00100000811 : [NSNumber numberWithInt:NSF17FunctionKey],
    @0x00100000812 : [NSNumber numberWithInt:NSF18FunctionKey],
    @0x00100000813 : [NSNumber numberWithInt:NSF19FunctionKey],
    @0x00100000814 : [NSNumber numberWithInt:NSF20FunctionKey],

    // For some reason, there don't appear to be constants for these in ObjC. In
    // Swift, there is a class with static members for these: KeyEquivalent. The
    // values below are taken from that (where they don't already appear above).
    @0x00100000302 : @0xf702,  // ArrowLeft
    @0x00100000303 : @0xf703,  // ArrowRight
    @0x00100000304 : @0xf700,  // ArrowUp
    @0x00100000301 : @0xf701,  // ArrowDown
    @0x00100000306 : @0xf729,  // Home
    @0x00100000305 : @0xf72B,  // End
    @0x00100000308 : @0xf72c,  // PageUp
    @0x00100000307 : @0xf72d,  // PageDown
    @0x0010000001b : @0x001B,  // Escape
  };
}
/**
 * The mapping from the PlatformProvidedMenu enum to the macOS selectors for the
 * provided menus. Used by createPlatformProvidedMenu: to locate the matching
 * item in the default menu bar by its action.
 */
static const std::map<flutter::PlatformProvidedMenu, SEL> GetMacOSProvidedMenus() {
  return {
      {flutter::PlatformProvidedMenu::kAbout, @selector(orderFrontStandardAboutPanel:)},
      {flutter::PlatformProvidedMenu::kQuit, @selector(terminate:)},
      // servicesSubmenu is handled specially below: it is assumed to be the first
      // submenu in the preserved platform provided menus, since it doesn't have a
      // definitive selector like the rest.
      {flutter::PlatformProvidedMenu::kServicesSubmenu, @selector(submenuAction:)},
      {flutter::PlatformProvidedMenu::kHide, @selector(hide:)},
      {flutter::PlatformProvidedMenu::kHideOtherApplications, @selector(hideOtherApplications:)},
      {flutter::PlatformProvidedMenu::kShowAllApplications, @selector(unhideAllApplications:)},
      {flutter::PlatformProvidedMenu::kStartSpeaking, @selector(startSpeaking:)},
      {flutter::PlatformProvidedMenu::kStopSpeaking, @selector(stopSpeaking:)},
      {flutter::PlatformProvidedMenu::kToggleFullScreen, @selector(toggleFullScreen:)},
      {flutter::PlatformProvidedMenu::kMinimizeWindow, @selector(performMiniaturize:)},
      {flutter::PlatformProvidedMenu::kZoomWindow, @selector(performZoom:)},
      {flutter::PlatformProvidedMenu::kArrangeWindowsInFront, @selector(arrangeInFront:)},
  };
}
/**
 * Converts a framework shortcut-modifier bitmask (kFlutterShortcutModifier*)
 * into the equivalent NSEventModifierFlags for use as an NSMenuItem's
 * keyEquivalentModifierMask.
 */
static NSEventModifierFlags KeyEquivalentModifierMaskForModifiers(NSNumber* modifiers) {
  const int flutterModifierFlags = modifiers.intValue;
  // Pairs of (framework bit, AppKit flag). There are also modifier flags for
  // things like the function (Fn) key, but the framework doesn't support
  // those.
  const struct {
    int flutterBit;
    NSEventModifierFlags appKitFlag;
  } mappings[] = {
      {kFlutterShortcutModifierMeta, NSEventModifierFlagCommand},
      {kFlutterShortcutModifierShift, NSEventModifierFlagShift},
      {kFlutterShortcutModifierAlt, NSEventModifierFlagOption},
      {kFlutterShortcutModifierControl, NSEventModifierFlagControl},
  };
  NSEventModifierFlags flags = 0;
  for (const auto& mapping : mappings) {
    if (flutterModifierFlags & mapping.flutterBit) {
      flags |= mapping.appKitFlag;
    }
  }
  return flags;
}
/**
 * An NSMenuDelegate used to listen for changes in the menu when it opens and
 * closes.
 */
@interface FlutterMenuDelegate : NSObject <NSMenuDelegate>

/**
 * When this delegate receives notification that the menu opened or closed, it
 * will send a message on the given channel to that effect for the menu item
 * with the given id (the ID comes from the data supplied by the framework to
 * |FlutterMenuPlugin.setMenus|).
 */
- (instancetype)initWithIdentifier:(int64_t)identifier channel:(FlutterMethodChannel*)channel;

@end
@implementation FlutterMenuDelegate {
  FlutterMethodChannel* _channel;
  int64_t _identifier;
}

- (instancetype)initWithIdentifier:(int64_t)identifier channel:(FlutterMethodChannel*)channel {
  if ((self = [super init]) != nil) {
    _channel = channel;
    _identifier = identifier;
  }
  return self;
}

// Reports to the framework that the menu associated with |_identifier| opened.
- (void)menuWillOpen:(NSMenu*)menu {
  [_channel invokeMethod:kMenuOpenedMethod arguments:@(_identifier)];
}

// Reports to the framework that the menu associated with |_identifier| closed.
- (void)menuDidClose:(NSMenu*)menu {
  [_channel invokeMethod:kMenuClosedMethod arguments:@(_identifier)];
}

@end
@interface FlutterMenuPlugin ()

// Initialize the plugin with the given method channel.
- (instancetype)initWithChannel:(FlutterMethodChannel*)channel;

// Iterates through the given menu hierarchy, and replaces "APP_NAME"
// with the localized running application name.
- (void)replaceAppName:(NSArray<NSMenuItem*>*)items;

// Look up the menu item with the given selector in the list of provided menus
// and return it. Searches |menu| recursively; when |menu| is nil, searches the
// cached platform provided items instead.
- (NSMenuItem*)findProvidedMenuItem:(NSMenu*)menu ofType:(SEL)selector;

// Create a platform-provided menu from the given enum type.
- (NSMenuItem*)createPlatformProvidedMenu:(flutter::PlatformProvidedMenu)type;

// Create an NSMenuItem from information in the dictionary sent by the framework.
- (NSMenuItem*)menuItemFromFlutterRepresentation:(NSDictionary*)representation;

// Invokes kMenuSelectedCallbackMethod with the sender's ID.
//
// Used as the callback for all Flutter-created menu items that have IDs.
- (void)flutterMenuItemSelected:(id)sender;

// Replaces the NSApp.mainMenu with menus created from an array of top level
// menus sent by the framework.
- (void)setMenus:(nonnull NSDictionary*)representation;

@end
@implementation FlutterMenuPlugin {
// The channel used to communicate with Flutter.
FlutterMethodChannel* _channel;
// This contains a copy of the default platform provided items.
NSArray<NSMenuItem*>* _platformProvidedItems;
// These are the menu delegates that will listen to open/close events for menu
// items. This array is holding them so that we can deallocate them when
// rebuilding the menus.
NSMutableArray<FlutterMenuDelegate*>* _menuDelegates;
}
#pragma mark - Private Methods
// Initializes the plugin with the channel used to communicate with the
// framework, and snapshots the default (platform provided) menu bar items.
- (instancetype)initWithChannel:(FlutterMethodChannel*)channel {
  self = [super init];
  if (self) {
    _channel = channel;
    _menuDelegates = [[NSMutableArray alloc] init];
    // Make a copy of all the platform provided menus for later use.
    // (The previous `_platformProvidedItems = @[];` dead store was removed:
    // the field is unconditionally overwritten here before any use.)
    _platformProvidedItems = [[NSApp.mainMenu itemArray] mutableCopy];
    // As copied, these platform provided menu items don't yet have the APP_NAME
    // string replaced in them, so this rectifies that.
    [self replaceAppName:_platformProvidedItems];
  }
  return self;
}
/**
 * Iterates through the given menu hierarchy, and replaces "APP_NAME"
 * with the localized running application name.
 */
- (void)replaceAppName:(NSArray<NSMenuItem*>*)items {
  NSString* appName = [NSRunningApplication currentApplication].localizedName;
  for (NSMenuItem* item in items) {
    NSString* title = item.title;
    if ([title containsString:kAppName]) {
      item.title = [title stringByReplacingOccurrencesOfString:kAppName withString:appName];
    }
    // Recurse into submenus so nested items are updated as well.
    if (item.hasSubmenu) {
      [self replaceAppName:item.submenu.itemArray];
    }
  }
}
// Recursively searches |menu| (or, when |menu| is nil, the cached platform
// provided items) for a menu item whose action is |selector|. Returns nil if
// no such item exists.
- (NSMenuItem*)findProvidedMenuItem:(NSMenu*)menu ofType:(SEL)selector {
  const NSArray<NSMenuItem*>* items = menu ? menu.itemArray : _platformProvidedItems;
  for (NSMenuItem* item in items) {
    if ([item action] == selector) {
      return item;
    }
    // Not found at this level; descend into the item's submenu, if any.
    if ([[item submenu] numberOfItems] > 0) {
      NSMenuItem* foundChild = [self findProvidedMenuItem:[item submenu] ofType:selector];
      if (foundChild) {
        return foundChild;
      }
    }
  }
  return nil;
}
- (NSMenuItem*)createPlatformProvidedMenu:(flutter::PlatformProvidedMenu)type {
const std::map<flutter::PlatformProvidedMenu, SEL> providedMenus = GetMacOSProvidedMenus();
auto found_type = providedMenus.find(type);
if (found_type == providedMenus.end()) {
return nil;
}
SEL selectorTarget = found_type->second;
// Since it doesn't have a definitive selector, the Services submenu is
// assumed to be the first item with a submenu action in the first menu item
// of the default menu set. We can't just get the title to check, since that
// is localized, and the contents of the menu aren't fixed (or even available).
NSMenu* startingMenu = type == flutter::PlatformProvidedMenu::kServicesSubmenu
? [_platformProvidedItems[0] submenu]
: nil;
NSMenuItem* found = [self findProvidedMenuItem:startingMenu ofType:selectorTarget];
// Return a copy because the original menu item might not have been removed
// from the main menu yet, and AppKit doesn't like menu items that exist in
// more than one menu at a time.
return [found copy];
}
- (NSMenuItem*)menuItemFromFlutterRepresentation:(NSDictionary*)representation {
if ([(NSNumber*)([representation valueForKey:kDividerKey]) intValue] == YES) {
return [NSMenuItem separatorItem];
}
NSNumber* platformProvidedMenuId = representation[kPlatformProvidedMenuKey];
NSString* keyEquivalent = @"";
if (platformProvidedMenuId) {
return [self
createPlatformProvidedMenu:(flutter::PlatformProvidedMenu)platformProvidedMenuId.intValue];
} else {
if (representation[kShortcutCharacterKey]) {
keyEquivalent = representation[kShortcutCharacterKey];
} else {
NSNumber* triggerKeyId = representation[kShortcutTriggerKey];
const NSDictionary<NSNumber*, NSNumber*>* specialKeys = GetMacOsSpecialKeys();
NSNumber* trigger = specialKeys[triggerKeyId];
if (trigger) {
keyEquivalent = [NSString stringWithFormat:@"%C", [trigger unsignedShortValue]];
} else {
if (([triggerKeyId unsignedLongLongValue] & kFlutterKeyIdPlaneMask) ==
kFlutterKeyIdUnicodePlane) {
keyEquivalent = [[NSString
stringWithFormat:@"%C", (unichar)([triggerKeyId unsignedLongLongValue] &
kFlutterKeyIdValueMask)] lowercaseString];
}
}
}
}
NSNumber* identifier = representation[kIdKey];
SEL action = (identifier ? @selector(flutterMenuItemSelected:) : NULL);
NSString* appName = [NSRunningApplication currentApplication].localizedName;
NSString* title = [representation[kLabelKey] stringByReplacingOccurrencesOfString:kAppName
withString:appName];
NSMenuItem* item = [[NSMenuItem alloc] initWithTitle:title
action:action
keyEquivalent:keyEquivalent];
if ([keyEquivalent length] > 0) {
item.keyEquivalentModifierMask =
KeyEquivalentModifierMaskForModifiers(representation[kShortcutModifiersKey]);
}
if (identifier) {
item.tag = identifier.longLongValue;
item.target = self;
}
NSNumber* enabled = representation[kEnabledKey];
if (enabled) {
item.enabled = enabled.boolValue;
}
NSArray* children = representation[kChildrenKey];
if (children && children.count > 0) {
NSMenu* submenu = [[NSMenu alloc] initWithTitle:title];
FlutterMenuDelegate* delegate = [[FlutterMenuDelegate alloc] initWithIdentifier:item.tag
channel:_channel];
[_menuDelegates addObject:delegate];
submenu.delegate = delegate;
submenu.autoenablesItems = NO;
for (NSDictionary* child in children) {
NSMenuItem* newItem = [self menuItemFromFlutterRepresentation:child];
if (newItem) {
[submenu addItem:newItem];
}
}
item.submenu = submenu;
}
return item;
}
- (void)flutterMenuItemSelected:(id)sender {
NSMenuItem* item = sender;
[_channel invokeMethod:kMenuSelectedCallbackMethod arguments:@(item.tag)];
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
if ([call.method isEqualToString:kIsPluginAvailableMethod]) {
result(@YES);
} else if ([call.method isEqualToString:kMenuSetMenusMethod]) {
NSDictionary* menus = call.arguments;
[self setMenus:menus];
result(nil);
} else {
result(FlutterMethodNotImplemented);
}
}
- (void)setMenus:(NSDictionary*)representation {
[_menuDelegates removeAllObjects];
NSMenu* newMenu = [[NSMenu alloc] init];
// There's currently only one window, named "0", but there could be other
// eventually, with different menu configurations.
for (NSDictionary* item in representation[@"0"]) {
NSMenuItem* menuItem = [self menuItemFromFlutterRepresentation:item];
menuItem.representedObject = self;
NSNumber* identifier = item[kIdKey];
FlutterMenuDelegate* delegate =
[[FlutterMenuDelegate alloc] initWithIdentifier:identifier.longLongValue channel:_channel];
[_menuDelegates addObject:delegate];
[menuItem submenu].delegate = delegate;
[newMenu addItem:menuItem];
}
NSApp.mainMenu = newMenu;
}
#pragma mark - Public Class Methods
+ (void)registerWithRegistrar:(nonnull id<FlutterPluginRegistrar>)registrar {
FlutterMethodChannel* channel = [FlutterMethodChannel methodChannelWithName:kChannelName
binaryMessenger:registrar.messenger];
FlutterMenuPlugin* instance = [[FlutterMenuPlugin alloc] initWithChannel:channel];
[registrar addMethodCallDelegate:instance channel:channel];
}
@end
| engine/shell/platform/darwin/macos/framework/Source/FlutterMenuPlugin.mm/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterMenuPlugin.mm",
"repo_id": "engine",
"token_count": 6126
} | 366 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERSURFACE_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERSURFACE_H_
#import <Cocoa/Cocoa.h>
#import "flutter/shell/platform/embedder/embedder.h"
/**
* Opaque surface type.
* Can be represented as FlutterMetalTexture to cross the embedder API boundary.
*/
@interface FlutterSurface : NSObject
// Returns a FlutterMetalTexture representation of this surface, suitable for
// crossing the embedder API boundary.
- (FlutterMetalTexture)asFlutterMetalTexture;
// Recovers the FlutterSurface from a FlutterMetalTexture previously produced
// by -asFlutterMetalTexture. May return nil.
+ (nullable FlutterSurface*)fromFlutterMetalTexture:(nonnull const FlutterMetalTexture*)texture;
@end
/**
 * Internal FlutterSurface interface used by FlutterSurfaceManager.
 * Wraps an IOSurface framebuffer and metadata related to the surface.
 */
@interface FlutterSurface (Private)
// Creates a surface of the given size backed by the given Metal device.
- (nonnull instancetype)initWithSize:(CGSize)size device:(nonnull id<MTLDevice>)device;
// The IOSurface framebuffer backing this surface.
@property(readonly, nonatomic, nonnull) IOSurfaceRef ioSurface;
// The dimensions of the surface.
@property(readonly, nonatomic) CGSize size;
// Identifier associated with this surface's texture.
@property(readonly, nonatomic) int64_t textureId;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERSURFACE_H_
| engine/shell/platform/darwin/macos/framework/Source/FlutterSurface.h/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterSurface.h",
"repo_id": "engine",
"token_count": 418
} | 367 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERVIEWPROVIDER_H_
#define FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERVIEWPROVIDER_H_
#import "flutter/shell/platform/darwin/macos/framework/Source/FlutterView.h"
/**
 * An interface to query FlutterView.
 *
 * See also:
 *
 * * FlutterViewEngineProvider, a typical implementation.
 */
@protocol FlutterViewProvider
/**
 * Get the FlutterView with the given view ID.
 *
 * Returns nil if the ID is invalid.
 */
// Note: the parameter is intentionally named `id`; it shadows the ObjC `id`
// type within this declaration but is unambiguous here.
- (nullable FlutterView*)viewForId:(FlutterViewId)id;
@end
#endif // FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_SOURCE_FLUTTERVIEWPROVIDER_H_
| engine/shell/platform/darwin/macos/framework/Source/FlutterViewProvider.h/0 | {
"file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterViewProvider.h",
"repo_id": "engine",
"token_count": 286
} | 368 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/embedder/embedder_engine.h"
#include "flutter/fml/make_copyable.h"
#include "flutter/shell/platform/embedder/vsync_waiter_embedder.h"
namespace flutter {
// Bundles the parameters needed to create a Shell, captured at
// EmbedderEngine construction time and consumed later by LaunchShell().
struct ShellArgs {
  Settings settings;
  Shell::CreateCallback<PlatformView> on_create_platform_view;
  Shell::CreateCallback<Rasterizer> on_create_rasterizer;
  ShellArgs(const Settings& p_settings,
            Shell::CreateCallback<PlatformView> p_on_create_platform_view,
            Shell::CreateCallback<Rasterizer> p_on_create_rasterizer)
      : settings(p_settings),
        on_create_platform_view(std::move(p_on_create_platform_view)),
        on_create_rasterizer(std::move(p_on_create_rasterizer)) {}
};
// Stores the construction parameters; the shell itself is not created until
// LaunchShell() is invoked.
EmbedderEngine::EmbedderEngine(
    std::unique_ptr<EmbedderThreadHost> thread_host,
    const flutter::TaskRunners& task_runners,
    const flutter::Settings& settings,
    RunConfiguration run_configuration,
    const Shell::CreateCallback<PlatformView>& on_create_platform_view,
    const Shell::CreateCallback<Rasterizer>& on_create_rasterizer,
    std::unique_ptr<EmbedderExternalTextureResolver> external_texture_resolver)
    : thread_host_(std::move(thread_host)),
      task_runners_(task_runners),
      run_configuration_(std::move(run_configuration)),
      shell_args_(std::make_unique<ShellArgs>(settings,
                                              on_create_platform_view,
                                              on_create_rasterizer)),
      external_texture_resolver_(std::move(external_texture_resolver)) {}
EmbedderEngine::~EmbedderEngine() = default;
// Creates the shell from the arguments captured in the constructor and
// returns whether the engine is valid afterwards. The captured arguments are
// consumed regardless of the outcome, so launching can only be attempted once.
bool EmbedderEngine::LaunchShell() {
  if (!shell_args_) {
    FML_DLOG(ERROR) << "Invalid shell arguments.";
    return false;
  }
  if (shell_) {
    // NOTE(review): this only logs; the existing shell is still replaced
    // below. Confirm that silently replacing an initialized shell is intended.
    FML_DLOG(ERROR) << "Shell already initialized";
  }
  shell_ = Shell::Create(
      flutter::PlatformData(), task_runners_, shell_args_->settings,
      shell_args_->on_create_platform_view, shell_args_->on_create_rasterizer);
  // Reset the args no matter what. They will never be used to initialize a
  // shell again.
  shell_args_.reset();
  return IsValid();
}
// Destroys the shell; the returned validity is therefore always false.
bool EmbedderEngine::CollectShell() {
  shell_.reset();
  return IsValid();
}
// Runs the root isolate with the run configuration captured at construction.
// The configuration is moved out and cannot be reused.
bool EmbedderEngine::RunRootIsolate() {
  if (!IsValid() || !run_configuration_.IsValid()) {
    return false;
  }
  shell_->RunEngine(std::move(run_configuration_));
  return true;
}
// Whether the shell has been launched and not yet collected.
bool EmbedderEngine::IsValid() const {
  return static_cast<bool>(shell_);
}
// The task runners this engine was constructed with.
const TaskRunners& EmbedderEngine::GetTaskRunners() const {
  return task_runners_;
}
// Notifies the platform view that a rendering surface is available.
// Returns false when the shell is not running.
bool EmbedderEngine::NotifyCreated() {
  if (!IsValid()) {
    return false;
  }
  shell_->GetPlatformView()->NotifyCreated();
  return true;
}
// Notifies the platform view that the rendering surface is going away.
// Returns false when the shell is not running.
bool EmbedderEngine::NotifyDestroyed() {
  if (!IsValid()) {
    return false;
  }
  shell_->GetPlatformView()->NotifyDestroyed();
  return true;
}
// Forwards updated viewport metrics for |view_id| to the platform view.
// Returns false when the shell or platform view is unavailable.
bool EmbedderEngine::SetViewportMetrics(
    int64_t view_id,
    const flutter::ViewportMetrics& metrics) {
  if (!IsValid()) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->SetViewportMetrics(view_id, metrics);
  return true;
}
// Forwards a pointer data packet to the platform view. Returns false for a
// null packet or when the shell/platform view is unavailable.
bool EmbedderEngine::DispatchPointerDataPacket(
    std::unique_ptr<flutter::PointerDataPacket> packet) {
  if (!IsValid() || !packet) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->DispatchPointerDataPacket(std::move(packet));
  return true;
}
// Forwards a platform message to the platform view. Returns false for a null
// message or when the shell/platform view is unavailable.
bool EmbedderEngine::SendPlatformMessage(
    std::unique_ptr<PlatformMessage> message) {
  if (!IsValid() || !message) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->DispatchPlatformMessage(std::move(message));
  return true;
}
// Registers an external texture, resolving the embedder texture identifier
// through the external texture resolver.
bool EmbedderEngine::RegisterTexture(int64_t texture) {
  if (!IsValid()) {
    return false;
  }
  shell_->GetPlatformView()->RegisterTexture(
      external_texture_resolver_->ResolveExternalTexture(texture));
  return true;
}
// Unregisters a previously registered external texture.
bool EmbedderEngine::UnregisterTexture(int64_t texture) {
  if (!IsValid()) {
    return false;
  }
  shell_->GetPlatformView()->UnregisterTexture(texture);
  return true;
}
// Marks a new frame as available on the given external texture.
bool EmbedderEngine::MarkTextureFrameAvailable(int64_t texture) {
  if (!IsValid()) {
    return false;
  }
  shell_->GetPlatformView()->MarkTextureFrameAvailable(texture);
  return true;
}
// Toggles semantics (accessibility) support in the running engine.
bool EmbedderEngine::SetSemanticsEnabled(bool enabled) {
  if (!IsValid()) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->SetSemanticsEnabled(enabled);
  return true;
}
// Forwards the accessibility feature bitmask to the platform view.
bool EmbedderEngine::SetAccessibilityFeatures(int32_t flags) {
  if (!IsValid()) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->SetAccessibilityFeatures(flags);
  return true;
}
// Dispatches a semantics action on |node_id| with optional |args| payload.
bool EmbedderEngine::DispatchSemanticsAction(int node_id,
                                             flutter::SemanticsAction action,
                                             fml::MallocMapping args) {
  if (!IsValid()) {
    return false;
  }
  auto platform_view = shell_->GetPlatformView();
  if (!platform_view) {
    return false;
  }
  platform_view->DispatchSemanticsAction(node_id, action, std::move(args));
  return true;
}
// Forwards an embedder vsync event to the vsync waiter so a pending frame
// can be scheduled against the given timings.
bool EmbedderEngine::OnVsyncEvent(intptr_t baton,
                                  fml::TimePoint frame_start_time,
                                  fml::TimePoint frame_target_time) {
  if (!IsValid()) {
    return false;
  }
  return VsyncWaiterEmbedder::OnEmbedderVsync(
      task_runners_, baton, frame_start_time, frame_target_time);
}
// Asks the shell to reload system fonts, e.g. after a platform font change.
bool EmbedderEngine::ReloadSystemFonts() {
  if (!IsValid()) {
    return false;
  }
  return shell_->ReloadSystemFonts();
}
// Posts |task| onto the raster task runner. Returns false when the shell is
// not running.
bool EmbedderEngine::PostRenderThreadTask(const fml::closure& task) {
  if (!IsValid()) {
    return false;
  }
  shell_->GetTaskRunners().GetRasterTaskRunner()->PostTask(task);
  return true;
}
// Executes a task previously handed to the embedder via its custom task
// runner interop.
bool EmbedderEngine::RunTask(const FlutterTask* task) {
  // The shell doesn't need to be running or valid for access to the thread
  // host. This is why there is no `IsValid` check here. This allows embedders
  // to perform custom task runner interop before the shell is running.
  if (task == nullptr) {
    return false;
  }
  return thread_host_->PostTask(reinterpret_cast<int64_t>(task->runner),
                                task->task);
}
// Posts |closure| once onto each engine-managed thread (raster, IO, UI,
// platform) and onto every Dart VM concurrent worker, passing each invocation
// the type of the thread it runs on.
bool EmbedderEngine::PostTaskOnEngineManagedNativeThreads(
    const std::function<void(FlutterNativeThreadType)>& closure) const {
  if (!IsValid() || closure == nullptr) {
    return false;
  }
  // Wraps the closure so it runs on the given runner tagged with |type|.
  const auto trampoline = [closure](
                              FlutterNativeThreadType type,
                              const fml::RefPtr<fml::TaskRunner>& runner) {
    runner->PostTask([closure, type] { closure(type); });
  };
  // Post the task to all thread host threads.
  const auto& task_runners = shell_->GetTaskRunners();
  trampoline(kFlutterNativeThreadTypeRender,
             task_runners.GetRasterTaskRunner());
  trampoline(kFlutterNativeThreadTypeWorker, task_runners.GetIOTaskRunner());
  trampoline(kFlutterNativeThreadTypeUI, task_runners.GetUITaskRunner());
  trampoline(kFlutterNativeThreadTypePlatform,
             task_runners.GetPlatformTaskRunner());
  // Post the task to all worker threads.
  // NOTE(review): GetDartVM() is dereferenced unchecked; presumably a running
  // shell guarantees a live VM — confirm.
  auto vm = shell_->GetDartVM();
  vm->GetConcurrentMessageLoop()->PostTaskToAllWorkers(
      [closure]() { closure(kFlutterNativeThreadTypeWorker); });
  return true;
}
// Requests that a new frame be scheduled. Returns false when the shell or
// its platform view is unavailable.
bool EmbedderEngine::ScheduleFrame() {
  if (!IsValid()) {
    return false;
  }
  if (auto platform_view = shell_->GetPlatformView()) {
    platform_view->ScheduleFrame();
    return true;
  }
  return false;
}
// Returns the underlying shell. The shell must have been launched.
Shell& EmbedderEngine::GetShell() {
  FML_DCHECK(shell_);
  return *shell_;
}
} // namespace flutter
| engine/shell/platform/embedder/embedder_engine.cc/0 | {
"file_path": "engine/shell/platform/embedder/embedder_engine.cc",
"repo_id": "engine",
"token_count": 3014
} | 369 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_EMBEDDER_EMBEDDER_LAYERS_H_
#define FLUTTER_SHELL_PLATFORM_EMBEDDER_EMBEDDER_LAYERS_H_
#include <memory>
#include <vector>
#include "flutter/flow/embedded_views.h"
#include "flutter/fml/macros.h"
#include "flutter/shell/platform/embedder/embedder.h"
#include "third_party/skia/include/core/SkMatrix.h"
#include "third_party/skia/include/core/SkSize.h"
namespace flutter {
// Accumulates the layers (backing stores and platform views) composed for a
// single frame and presents them to the embedder as FlutterLayer structs,
// keeping all referenced allocations alive until presentation.
class EmbedderLayers {
 public:
  EmbedderLayers(SkISize frame_size,
                 double device_pixel_ratio,
                 SkMatrix root_surface_transformation,
                 uint64_t presentation_time);
  ~EmbedderLayers();
  // Appends a layer rendered into an embedder backing store, along with the
  // regions of the store that were actually drawn this frame.
  void PushBackingStoreLayer(const FlutterBackingStore* store,
                             const std::vector<SkIRect>& drawn_region);
  // Appends a layer occupied by an embedder platform view.
  void PushPlatformViewLayer(FlutterPlatformViewIdentifier identifier,
                             const EmbeddedViewParams& params);
  // Invoked with the accumulated layers for a view; returns whether the
  // embedder successfully presented them.
  using PresentCallback =
      std::function<bool(FlutterViewId view_id,
                         const std::vector<const FlutterLayer*>& layers)>;
  void InvokePresentCallback(FlutterViewId view_id,
                             const PresentCallback& callback) const;
 private:
  const SkISize frame_size_;
  const double device_pixel_ratio_;
  const SkMatrix root_surface_transformation_;
  // The vectors below own every allocation referenced by pointers inside
  // presented_layers_, keeping them valid for the embedder callback.
  std::vector<std::unique_ptr<FlutterPlatformView>> platform_views_referenced_;
  std::vector<std::unique_ptr<FlutterPlatformViewMutation>>
      mutations_referenced_;
  std::vector<std::unique_ptr<std::vector<const FlutterPlatformViewMutation*>>>
      mutations_arrays_referenced_;
  std::vector<std::unique_ptr<FlutterBackingStorePresentInfo>>
      present_info_referenced_;
  std::vector<std::unique_ptr<FlutterRegion>> regions_referenced_;
  std::vector<std::unique_ptr<std::vector<FlutterRect>>> rects_referenced_;
  std::vector<FlutterLayer> presented_layers_;
  uint64_t presentation_time_;
  FML_DISALLOW_COPY_AND_ASSIGN(EmbedderLayers);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_EMBEDDER_EMBEDDER_LAYERS_H_
| engine/shell/platform/embedder/embedder_layers.h/0 | {
"file_path": "engine/shell/platform/embedder/embedder_layers.h",
"repo_id": "engine",
"token_count": 878
} | 370 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/embedder/embedder_surface_gl.h"
#include <utility>
#include "flutter/shell/common/shell_io_manager.h"
namespace flutter {
// Validates that every required callback is present in the dispatch table;
// the surface is only marked valid when all of them are set.
EmbedderSurfaceGL::EmbedderSurfaceGL(
    GLDispatchTable gl_dispatch_table,
    bool fbo_reset_after_present,
    std::shared_ptr<EmbedderExternalViewEmbedder> external_view_embedder)
    : gl_dispatch_table_(std::move(gl_dispatch_table)),
      fbo_reset_after_present_(fbo_reset_after_present),
      external_view_embedder_(std::move(external_view_embedder)) {
  // Make sure all required members of the dispatch table are checked.
  if (!gl_dispatch_table_.gl_make_current_callback ||
      !gl_dispatch_table_.gl_clear_current_callback ||
      !gl_dispatch_table_.gl_present_callback ||
      !gl_dispatch_table_.gl_fbo_callback ||
      !gl_dispatch_table_.gl_populate_existing_damage) {
    // valid_ remains false; IsValid() will report the misconfiguration.
    return;
  }
  valid_ = true;
}
EmbedderSurfaceGL::~EmbedderSurfaceGL() = default;
// |EmbedderSurface|
// Whether the dispatch table passed at construction was complete.
bool EmbedderSurfaceGL::IsValid() const {
  return valid_;
}
// |GPUSurfaceGLDelegate|
// Makes the embedder's GL context current, wrapping the callback's result.
std::unique_ptr<GLContextResult> EmbedderSurfaceGL::GLContextMakeCurrent() {
  return std::make_unique<GLContextDefaultResult>(
      gl_dispatch_table_.gl_make_current_callback());
}
// |GPUSurfaceGLDelegate|
// Clears the current GL context via the embedder callback.
bool EmbedderSurfaceGL::GLContextClearCurrent() {
  return gl_dispatch_table_.gl_clear_current_callback();
}
// |GPUSurfaceGLDelegate|
bool EmbedderSurfaceGL::GLContextPresent(const GLPresentInfo& present_info) {
  // Pass the present information to the embedder present callback.
  return gl_dispatch_table_.gl_present_callback(present_info);
}
// |GPUSurfaceGLDelegate|
GLFBOInfo EmbedderSurfaceGL::GLContextFBO(GLFrameInfo frame_info) const {
  // Get the FBO ID using the gl_fbo_callback and then get existing damage by
  // passing that ID to the gl_populate_existing_damage callback.
  return gl_dispatch_table_.gl_populate_existing_damage(
      gl_dispatch_table_.gl_fbo_callback(frame_info));
}
// |GPUSurfaceGLDelegate|
// Whether the FBO binding must be re-queried after each present.
bool EmbedderSurfaceGL::GLContextFBOResetAfterPresent() const {
  return fbo_reset_after_present_;
}
// |GPUSurfaceGLDelegate|
// Returns the embedder-supplied root surface transformation, or identity
// when the embedder did not provide a transformation callback.
SkMatrix EmbedderSurfaceGL::GLContextSurfaceTransformation() const {
  if (auto callback = gl_dispatch_table_.gl_surface_transformation_callback) {
    return callback();
  }
  return SkMatrix::I();
}
// |GPUSurfaceGLDelegate|
// Returns the embedder-supplied GL proc resolver (may be null).
EmbedderSurfaceGL::GLProcResolver EmbedderSurfaceGL::GetGLProcResolver() const {
  return gl_dispatch_table_.gl_proc_resolver;
}
// |GPUSurfaceGLDelegate|
SurfaceFrame::FramebufferInfo EmbedderSurfaceGL::GLContextFramebufferInfo()
    const {
  // Enable partial repaint by default on the embedders.
  auto info = SurfaceFrame::FramebufferInfo{};
  info.supports_readback = true;
  // Partial repaint requires the existing-damage callback (also checked as a
  // required member in the constructor).
  info.supports_partial_repaint =
      gl_dispatch_table_.gl_populate_existing_damage != nullptr;
  return info;
}
// |EmbedderSurface|
// Creates the onscreen GPU surface. When an external view embedder is
// present, it owns composition of the frame, so the surface must not render
// to the onscreen target directly.
std::unique_ptr<Surface> EmbedderSurfaceGL::CreateGPUSurface() {
  return std::make_unique<GPUSurfaceGLSkia>(
      /*delegate=*/this,
      /*render_to_surface=*/!external_view_embedder_);
}
// |EmbedderSurface|
// Creates a Skia context for async resource (texture) uploads on the IO
// thread, or returns nullptr (with a logged error) when the embedder did not
// provide a usable make_resource_current callback.
sk_sp<GrDirectContext> EmbedderSurfaceGL::CreateResourceContext() const {
  auto callback = gl_dispatch_table_.gl_make_resource_current_callback;
  if (callback && callback()) {
    if (auto context = ShellIOManager::CreateCompatibleResourceLoadingContext(
            GrBackendApi::kOpenGL, GetGLInterface())) {
      return context;
    } else {
      FML_LOG(ERROR)
          << "Internal error: Resource context available but could not create "
             "a compatible Skia context.";
      return nullptr;
    }
  }
  // The callback was not available or failed.
  FML_LOG(ERROR)
      << "Could not create a resource context for async texture uploads. "
         "Expect degraded performance. Set a valid make_resource_current "
         "callback on FlutterOpenGLRendererConfig.";
  return nullptr;
}
} // namespace flutter
| engine/shell/platform/embedder/embedder_surface_gl.cc/0 | {
"file_path": "engine/shell/platform/embedder/embedder_surface_gl.cc",
"repo_id": "engine",
"token_count": 1493
} | 371 |
# `platform/embedder/fixtures`
The files in this directory are golden-file outputs of [`tests`](../tests),
but lack a simple way to be re-generated.
For example, here is what a failure might look like on CI:
```txt
[0;32m[ RUN ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend
[ERROR:flutter/shell/platform/embedder/tests/embedder_unittests_util.cc(199)] Image did not match expectation.
Expected:/b/s/w/ir/cache/builder/src/out/host_debug_unopt/gen/flutter/shell/platform/embedder/assets/expectation_verifyb143464703_soft_noxform.png
Got:/b/s/w/ir/cache/builder/src/out/host_debug_unopt/gen/flutter/shell/platform/embedder/assets/actual_verifyb143464703_soft_noxform.png
../../flutter/shell/platform/embedder/tests/embedder_unittests.cc:1335: Failure
Value of: ImageMatchesFixture("verifyb143464703_soft_noxform.png", rendered_scene)
Actual: false
Expected: true
[0;31m[ FAILED ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend (8077 ms)
[0;32m[----------] [m1 test from EmbedderTest (8080 ms total)
[0;32m[----------] [mGlobal test environment tear-down
[0;32m[==========] [m1 test from 1 test suite ran. (8080 ms total)
[0;32m[ PASSED ] [m0 tests.
[0;31m[ FAILED ] [m1 test, listed below:
[0;31m[ FAILED ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend
1 FAILED TEST
[13/296] EmbedderTest.VerifyB143464703WithSoftwareBackend returned/aborted with exit code 1 (8226 ms)
[14/296] EmbedderTest.VerifyB143464703WithSoftwareBackend (8484 ms)
[INFO:test_timeout_listener.cc(76)] Test timeout of 300 seconds per test case will be enforced.
[0;33mNote: Google Test filter = EmbedderTest.VerifyB143464703WithSoftwareBackend
[m[0;32m[==========] [mRunning 1 test from 1 test suite.
[0;32m[----------] [mGlobal test environment set-up.
[0;32m[----------] [m1 test from EmbedderTest
[0;33m[ DISABLED ] [mEmbedderTest.DISABLED_CanLaunchAndShutdownMultipleTimes
[0;32m[ RUN ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend
[ERROR:flutter/shell/platform/embedder/tests/embedder_unittests_util.cc(199)] Image did not match expectation.
Expected:/b/s/w/ir/cache/builder/src/out/host_debug_unopt/gen/flutter/shell/platform/embedder/assets/expectation_verifyb143464703_soft_noxform.png
Got:/b/s/w/ir/cache/builder/src/out/host_debug_unopt/gen/flutter/shell/platform/embedder/assets/actual_verifyb143464703_soft_noxform.png
../../flutter/shell/platform/embedder/tests/embedder_unittests.cc:1335: Failure
Value of: ImageMatchesFixture("verifyb143464703_soft_noxform.png", rendered_scene)
Actual: false
Expected: true
[0;31m[ FAILED ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend (8348 ms)
[0;32m[----------] [m1 test from EmbedderTest (8350 ms total)
[0;32m[----------] [mGlobal test environment tear-down
[0;32m[==========] [m1 test from 1 test suite ran. (8350 ms total)
[0;32m[ PASSED ] [m0 tests.
[0;31m[ FAILED ] [m1 test, listed below:
[0;31m[ FAILED ] [mEmbedderTest.VerifyB143464703WithSoftwareBackend
```
In order to update `verifyb143464703_soft_noxform.png`:
```shell
# The examples below assume:
# $ENGINE = /path/to/engine/src
# $TARGET = /path/to/engine/src/out/{{host_you_want_to_build}}
# 1. Make sure you have built the engine:
$ ninja -j1000 -C $ENGINE/out/$TARGET
# 2. Run the test locally (assuming you have built the engine).
$ $ENGINE/out/$TARGET/embedder_unittests*
# Or, to run just a single test:
$ $ENGINE/out/$TARGET/embedder_unittests --gtest_filter="EmbedderTest.VerifyB143464703WithSoftwareBackend"
# Or, a suite of tests:
$ $ENGINE/out/$TARGET/embedder_unittests --gtest_filter="EmbedderTest.*"
# 3. Now, copy the output to the golden file:
$ cp \
$ENGINE/out/$TARGET/gen/flutter/shell/platform/embedder/assets/expectation_verifyb143464703_soft_noxform.png \
$ENGINE/flutter/shell/platform/embedder/fixtures/verifyb143464703_soft_noxform.png
```
⚠️ **WARNING**: Some of the golden tests do not run on non-Linux OSes, which
means it's not currently possible to re-generate them on non-Linux OSes
(<https://github.com/flutter/flutter/issues/53784>). So set up a Linux VM
or find a friend with a Linux machine.
| engine/shell/platform/embedder/fixtures/README.md/0 | {
"file_path": "engine/shell/platform/embedder/fixtures/README.md",
"repo_id": "engine",
"token_count": 1604
} | 372 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/embedder/tests/embedder_config_builder.h"
#include "flutter/common/constants.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/shell/platform/embedder/embedder.h"
#include "tests/embedder_test_context.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkImage.h"
#ifdef SHELL_ENABLE_GL
#include "flutter/shell/platform/embedder/tests/embedder_test_compositor_gl.h"
#include "flutter/shell/platform/embedder/tests/embedder_test_context_gl.h"
#endif
#ifdef SHELL_ENABLE_VULKAN
#include "flutter/shell/platform/embedder/tests/embedder_test_context_vulkan.h"
#include "flutter/vulkan/vulkan_device.h" // nogncheck
#include "vulkan/vulkan_core.h" // nogncheck
#endif
#ifdef SHELL_ENABLE_METAL
#include "flutter/shell/platform/embedder/tests/embedder_test_context_metal.h"
#endif
namespace flutter {
namespace testing {
// Sets up default project args and per-backend renderer configs for the test
// context, then applies the requested initialization preference (assets,
// callbacks, snapshots and/or AOT data).
EmbedderConfigBuilder::EmbedderConfigBuilder(
    EmbedderTestContext& context,
    InitializationPreference preference)
    : context_(context) {
  project_args_.struct_size = sizeof(project_args_);
  project_args_.shutdown_dart_vm_when_done = true;
  project_args_.platform_message_callback =
      [](const FlutterPlatformMessage* message, void* context) {
        reinterpret_cast<EmbedderTestContext*>(context)
            ->PlatformMessageCallback(message);
      };
  custom_task_runners_.struct_size = sizeof(FlutterCustomTaskRunners);
#ifdef SHELL_ENABLE_GL
  // All OpenGL callbacks trampoline into the EmbedderTestContextGL passed as
  // the user_data |context| pointer.
  opengl_renderer_config_.struct_size = sizeof(FlutterOpenGLRendererConfig);
  opengl_renderer_config_.make_current = [](void* context) -> bool {
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLMakeCurrent();
  };
  opengl_renderer_config_.clear_current = [](void* context) -> bool {
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLClearCurrent();
  };
  opengl_renderer_config_.present_with_info =
      [](void* context, const FlutterPresentInfo* present_info) -> bool {
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLPresent(
        *present_info);
  };
  opengl_renderer_config_.fbo_with_frame_info_callback =
      [](void* context, const FlutterFrameInfo* frame_info) -> uint32_t {
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLGetFramebuffer(
        *frame_info);
  };
  opengl_renderer_config_.populate_existing_damage = nullptr;
  opengl_renderer_config_.make_resource_current = [](void* context) -> bool {
    return reinterpret_cast<EmbedderTestContextGL*>(context)
        ->GLMakeResourceCurrent();
  };
  opengl_renderer_config_.gl_proc_resolver = [](void* context,
                                                const char* name) -> void* {
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLGetProcAddress(
        name);
  };
  opengl_renderer_config_.fbo_reset_after_present = true;
  opengl_renderer_config_.surface_transformation =
      [](void* context) -> FlutterTransformation {
    return reinterpret_cast<EmbedderTestContext*>(context)
        ->GetRootSurfaceTransformation();
  };
#endif
#ifdef SHELL_ENABLE_METAL
  InitializeMetalRendererConfig();
#endif
#ifdef SHELL_ENABLE_VULKAN
  InitializeVulkanRendererConfig();
#endif
  software_renderer_config_.struct_size = sizeof(FlutterSoftwareRendererConfig);
  software_renderer_config_.surface_present_callback =
      [](void* context, const void* allocation, size_t row_bytes,
         size_t height) {
        // NOTE(review): width is derived as row_bytes / 4, which assumes
        // 4 bytes per pixel (N32) with no row padding — confirm.
        auto image_info =
            SkImageInfo::MakeN32Premul(SkISize::Make(row_bytes / 4, height));
        SkBitmap bitmap;
        if (!bitmap.installPixels(image_info, const_cast<void*>(allocation),
                                  row_bytes)) {
          FML_LOG(ERROR) << "Could not copy pixels for the software "
                            "composition from the engine.";
          return false;
        }
        bitmap.setImmutable();
        return reinterpret_cast<EmbedderTestContextSoftware*>(context)->Present(
            SkImages::RasterFromBitmap(bitmap));
      };
  // The first argument is always the executable name. Don't make tests have to
  // do this manually.
  AddCommandLineArgument("embedder_unittest");
  if (preference != InitializationPreference::kNoInitialize) {
    SetAssetsPath();
    SetIsolateCreateCallbackHook();
    SetSemanticsCallbackHooks();
    SetLogMessageCallbackHook();
    SetLocalizationCallbackHooks();
    SetChannelUpdateCallbackHook();
    AddCommandLineArgument("--disable-vm-service");
    if (preference == InitializationPreference::kSnapshotsInitialize ||
        preference == InitializationPreference::kMultiAOTInitialize) {
      SetSnapshots();
    }
    if (preference == InitializationPreference::kAOTDataInitialize ||
        preference == InitializationPreference::kMultiAOTInitialize) {
      SetAOTDataElf();
    }
  }
}
EmbedderConfigBuilder::~EmbedderConfigBuilder() = default;
// Mutable access to the accumulated project arguments.
FlutterProjectArgs& EmbedderConfigBuilder::GetProjectArgs() {
  return project_args_;
}
// Selects the software renderer and sets up a surface of |surface_size|.
void EmbedderConfigBuilder::SetSoftwareRendererConfig(SkISize surface_size) {
  renderer_config_.type = FlutterRendererType::kSoftware;
  renderer_config_.software = software_renderer_config_;
  context_.SetupSurface(surface_size);
}
// Installs the legacy (frame-info-less) FBO callback on the OpenGL config.
void EmbedderConfigBuilder::SetOpenGLFBOCallBack() {
#ifdef SHELL_ENABLE_GL
  // SetOpenGLRendererConfig must be called before this.
  FML_CHECK(renderer_config_.type == FlutterRendererType::kOpenGL);
  renderer_config_.open_gl.fbo_callback = [](void* context) -> uint32_t {
    FlutterFrameInfo frame_info = {};
    // fbo_callback doesn't use the frame size information, only
    // fbo_callback_with_frame_info does.
    frame_info.struct_size = sizeof(FlutterFrameInfo);
    frame_info.size.width = 0;
    frame_info.size.height = 0;
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLGetFramebuffer(
        frame_info);
  };
#endif
}
// Installs the legacy (present-info-less) present callback on the OpenGL
// config.
void EmbedderConfigBuilder::SetOpenGLPresentCallBack() {
#ifdef SHELL_ENABLE_GL
  // SetOpenGLRendererConfig must be called before this.
  FML_CHECK(renderer_config_.type == FlutterRendererType::kOpenGL);
  renderer_config_.open_gl.present = [](void* context) -> bool {
    // passing a placeholder fbo_id.
    return reinterpret_cast<EmbedderTestContextGL*>(context)->GLPresent(
        FlutterPresentInfo{
            .fbo_id = 0,
        });
  };
#endif
}
// Dispatches to the backend-specific renderer-config setter matching the
// given test context type. Each setter also sets up the test surface.
void EmbedderConfigBuilder::SetRendererConfig(EmbedderTestContextType type,
                                              SkISize surface_size) {
  switch (type) {
    case EmbedderTestContextType::kOpenGLContext:
      SetOpenGLRendererConfig(surface_size);
      break;
    case EmbedderTestContextType::kMetalContext:
      SetMetalRendererConfig(surface_size);
      break;
    case EmbedderTestContextType::kVulkanContext:
      SetVulkanRendererConfig(surface_size);
      break;
    case EmbedderTestContextType::kSoftwareContext:
      SetSoftwareRendererConfig(surface_size);
      break;
  }
}
// Selects the OpenGL renderer (when compiled in) and sets up a test surface
// of the requested size.
void EmbedderConfigBuilder::SetOpenGLRendererConfig(SkISize surface_size) {
#ifdef SHELL_ENABLE_GL
  renderer_config_.type = FlutterRendererType::kOpenGL;
  renderer_config_.open_gl = opengl_renderer_config_;
  context_.SetupSurface(surface_size);
#endif
}

// Selects the Metal renderer (when compiled in) and sets up a test surface
// of the requested size.
void EmbedderConfigBuilder::SetMetalRendererConfig(SkISize surface_size) {
#ifdef SHELL_ENABLE_METAL
  renderer_config_.type = FlutterRendererType::kMetal;
  renderer_config_.metal = metal_renderer_config_;
  context_.SetupSurface(surface_size);
#endif
}

// Selects the Vulkan renderer (when compiled in). An optional
// instance-proc-address callback overrides the default one from the test
// context, which lets tests interpose on Vulkan API resolution.
void EmbedderConfigBuilder::SetVulkanRendererConfig(
    SkISize surface_size,
    std::optional<FlutterVulkanInstanceProcAddressCallback>
        instance_proc_address_callback) {
#ifdef SHELL_ENABLE_VULKAN
  renderer_config_.type = FlutterRendererType::kVulkan;
  FlutterVulkanRendererConfig vulkan_renderer_config = vulkan_renderer_config_;
  if (instance_proc_address_callback.has_value()) {
    vulkan_renderer_config.get_instance_proc_address_callback =
        instance_proc_address_callback.value();
  }
  renderer_config_.vulkan = vulkan_renderer_config;
  context_.SetupSurface(surface_size);
#endif
}
// Points the project args at the context's assets directory.
// NOTE(review): stores a c_str() into the context-owned string — assumes the
// context outlives any engine launched from these args; confirm.
void EmbedderConfigBuilder::SetAssetsPath() {
  project_args_.assets_path = context_.GetAssetsPath().c_str();
}

// Copies whichever VM/isolate snapshot mappings the context provides into
// the project args. Each mapping is optional; absent ones are left unset.
void EmbedderConfigBuilder::SetSnapshots() {
  if (auto mapping = context_.GetVMSnapshotData()) {
    project_args_.vm_snapshot_data = mapping->GetMapping();
    project_args_.vm_snapshot_data_size = mapping->GetSize();
  }
  if (auto mapping = context_.GetVMSnapshotInstructions()) {
    project_args_.vm_snapshot_instructions = mapping->GetMapping();
    project_args_.vm_snapshot_instructions_size = mapping->GetSize();
  }
  if (auto mapping = context_.GetIsolateSnapshotData()) {
    project_args_.isolate_snapshot_data = mapping->GetMapping();
    project_args_.isolate_snapshot_data_size = mapping->GetSize();
  }
  if (auto mapping = context_.GetIsolateSnapshotInstructions()) {
    project_args_.isolate_snapshot_instructions = mapping->GetMapping();
    project_args_.isolate_snapshot_instructions_size = mapping->GetSize();
  }
}

// Uses the context's prepared AOT data (ELF) for the engine launch.
void EmbedderConfigBuilder::SetAOTDataElf() {
  project_args_.aot_data = context_.GetAOTData();
}
// Wires the test hook invoked when the root isolate is created.
void EmbedderConfigBuilder::SetIsolateCreateCallbackHook() {
  project_args_.root_isolate_create_callback =
      EmbedderTestContext::GetIsolateCreateCallbackHook();
}

// Wires all semantics update hooks. The engine picks the newest non-null
// callback variant, so tests can exercise any of the three generations.
void EmbedderConfigBuilder::SetSemanticsCallbackHooks() {
  project_args_.update_semantics_callback2 =
      context_.GetUpdateSemanticsCallback2Hook();
  project_args_.update_semantics_callback =
      context_.GetUpdateSemanticsCallbackHook();
  project_args_.update_semantics_node_callback =
      context_.GetUpdateSemanticsNodeCallbackHook();
  project_args_.update_semantics_custom_action_callback =
      context_.GetUpdateSemanticsCustomActionCallbackHook();
}

// Routes engine log messages through the test hook.
void EmbedderConfigBuilder::SetLogMessageCallbackHook() {
  project_args_.log_message_callback =
      EmbedderTestContext::GetLogMessageCallbackHook();
}

// Routes channel listener updates through the test hook.
void EmbedderConfigBuilder::SetChannelUpdateCallbackHook() {
  project_args_.channel_update_callback =
      context_.GetChannelUpdateCallbackHook();
}

// Sets the log tag. The tag string is stored on the builder so the c_str()
// handed to project args stays valid for the builder's lifetime.
void EmbedderConfigBuilder::SetLogTag(std::string tag) {
  log_tag_ = std::move(tag);
  project_args_.log_tag = log_tag_.c_str();
}

// Wires the platform-resolved-locale computation through the test hook.
void EmbedderConfigBuilder::SetLocalizationCallbackHooks() {
  project_args_.compute_platform_resolved_locale_callback =
      EmbedderTestContext::GetComputePlatformResolvedLocaleCallbackHook();
}
// Replaces argv[0]. Relies on the constructor having seeded
// command_line_arguments_ with "embedder_unittest", so index 0 exists.
// Empty names are ignored.
void EmbedderConfigBuilder::SetExecutableName(std::string executable_name) {
  if (executable_name.empty()) {
    return;
  }
  command_line_arguments_[0] = std::move(executable_name);
}

// Sets a custom Dart entrypoint. The string is stored on the builder so the
// c_str() handed to project args remains valid. Empty names are ignored.
void EmbedderConfigBuilder::SetDartEntrypoint(std::string entrypoint) {
  if (entrypoint.empty()) {
    return;
  }
  dart_entrypoint_ = std::move(entrypoint);
  project_args_.custom_dart_entrypoint = dart_entrypoint_.c_str();
}

// Appends a command line argument for the engine; empty strings are ignored.
void EmbedderConfigBuilder::AddCommandLineArgument(std::string arg) {
  if (arg.empty()) {
    return;
  }
  command_line_arguments_.emplace_back(std::move(arg));
}

// Appends an argument for the Dart entrypoint; empty strings are ignored.
void EmbedderConfigBuilder::AddDartEntrypointArgument(std::string arg) {
  if (arg.empty()) {
    return;
  }
  dart_entrypoint_arguments_.emplace_back(std::move(arg));
}
// Installs a custom platform task runner. A null runner leaves the default
// in place. Also publishes the custom task runner struct to project args.
void EmbedderConfigBuilder::SetPlatformTaskRunner(
    const FlutterTaskRunnerDescription* runner) {
  if (runner == nullptr) {
    return;
  }
  custom_task_runners_.platform_task_runner = runner;
  project_args_.custom_task_runners = &custom_task_runners_;
}

// Routes vsync batons from the engine to the test context, which decides
// when (and whether) to reply.
void EmbedderConfigBuilder::SetupVsyncCallback() {
  project_args_.vsync_callback = [](void* user_data, intptr_t baton) {
    auto context = reinterpret_cast<EmbedderTestContext*>(user_data);
    context->RunVsyncCallback(baton);
  };
}

// Returns a mutable reference to the renderer config for direct tweaking.
FlutterRendererConfig& EmbedderConfigBuilder::GetRendererConfig() {
  return renderer_config_;
}

// Installs a custom render task runner. A null runner leaves the default in
// place. Also publishes the custom task runner struct to project args.
void EmbedderConfigBuilder::SetRenderTaskRunner(
    const FlutterTaskRunnerDescription* runner) {
  if (runner == nullptr) {
    return;
  }
  custom_task_runners_.render_task_runner = runner;
  project_args_.custom_task_runners = &custom_task_runners_;
}

// Forwards platform messages from the engine to the given test callback.
void EmbedderConfigBuilder::SetPlatformMessageCallback(
    const std::function<void(const FlutterPlatformMessage*)>& callback) {
  context_.SetPlatformMessageCallback(callback);
}
// Configures the embedder compositor, routing backing store creation /
// collection and layer presentation into the context's test compositor.
//
// `avoid_backing_store_cache` disables the engine-side backing store cache;
// `use_present_layers_callback` selects the legacy present_layers_callback
// (implicit view only) instead of the per-view present_view_callback.
void EmbedderConfigBuilder::SetCompositor(bool avoid_backing_store_cache,
                                          bool use_present_layers_callback) {
  context_.SetupCompositor();
  auto& compositor = context_.GetCompositor();
  compositor_.struct_size = sizeof(compositor_);
  // The test compositor is the user_data for every compositor callback.
  compositor_.user_data = &compositor;
  compositor_.create_backing_store_callback =
      [](const FlutterBackingStoreConfig* config,  //
         FlutterBackingStore* backing_store_out,   //
         void* user_data                           //
      ) {
        return reinterpret_cast<EmbedderTestCompositor*>(user_data)
            ->CreateBackingStore(config, backing_store_out);
      };
  compositor_.collect_backing_store_callback =
      [](const FlutterBackingStore* backing_store,  //
         void* user_data                            //
      ) {
        return reinterpret_cast<EmbedderTestCompositor*>(user_data)
            ->CollectBackingStore(backing_store);
      };
  if (use_present_layers_callback) {
    compositor_.present_view_callback = [](const FlutterPresentViewInfo* info) {
      auto compositor =
          reinterpret_cast<EmbedderTestCompositor*>(info->user_data);
      return compositor->Present(info->view_id, info->layers,
                                 info->layers_count);
    };
  } else {
    compositor_.present_layers_callback = [](const FlutterLayer** layers,
                                             size_t layers_count,
                                             void* user_data) {
      auto compositor = reinterpret_cast<EmbedderTestCompositor*>(user_data);
      // The present layers callback is incompatible with multiple views;
      // it can only be used to render the implicit view.
      return compositor->Present(kFlutterImplicitViewId, layers, layers_count);
    };
  }
  compositor_.avoid_backing_store_cache = avoid_backing_store_cache;
  project_args_.compositor = &compositor_;
}

// Returns a mutable reference to the compositor struct for direct tweaking.
FlutterCompositor& EmbedderConfigBuilder::GetCompositor() {
  return compositor_;
}
// Chooses the kind of render target (texture, framebuffer, image, buffer)
// the test compositor produces, plus the pixel format for software targets.
void EmbedderConfigBuilder::SetRenderTargetType(
    EmbedderTestBackingStoreProducer::RenderTargetType type,
    FlutterSoftwarePixelFormat software_pixfmt) {
  auto& compositor = context_.GetCompositor();
  // TODO(wrightgeorge): figure out a better way of plumbing through the
  // GrDirectContext
  compositor.SetBackingStoreProducer(
      std::make_unique<EmbedderTestBackingStoreProducer>(
          compositor.GetGrContext(), type, software_pixfmt));
}

// Launches (initializes and runs) an engine with the accumulated config.
UniqueEngine EmbedderConfigBuilder::LaunchEngine() const {
  return SetupEngine(true);
}

// Initializes an engine without running it; the caller runs it later.
UniqueEngine EmbedderConfigBuilder::InitializeEngine() const {
  return SetupEngine(false);
}
// Builds the final argv arrays and either runs or merely initializes an
// engine instance. Returns an invalid UniqueEngine on failure.
//
// project_args_ is copied so per-launch argv pointers never leak into the
// builder's persistent state; this keeps the builder reusable for multiple
// launches.
UniqueEngine EmbedderConfigBuilder::SetupEngine(bool run) const {
  FlutterEngine engine = nullptr;
  FlutterProjectArgs project_args = project_args_;
  // NOTE(review): args/dart_args are locals — assumes the engine consumes
  // the argv arrays during the Run/Initialize call itself; confirm against
  // the embedder API contract.
  std::vector<const char*> args;
  args.reserve(command_line_arguments_.size());
  for (const auto& arg : command_line_arguments_) {
    args.push_back(arg.c_str());
  }
  if (!args.empty()) {
    project_args.command_line_argv = args.data();
    project_args.command_line_argc = args.size();
  } else {
    // Clear it out in case this is not the first engine launch from the
    // embedder config builder.
    project_args.command_line_argv = nullptr;
    project_args.command_line_argc = 0;
  }
  std::vector<const char*> dart_args;
  dart_args.reserve(dart_entrypoint_arguments_.size());
  for (const auto& arg : dart_entrypoint_arguments_) {
    dart_args.push_back(arg.c_str());
  }
  if (!dart_args.empty()) {
    project_args.dart_entrypoint_argv = dart_args.data();
    project_args.dart_entrypoint_argc = dart_args.size();
  } else {
    // Clear it out in case this is not the first engine launch from the
    // embedder config builder.
    project_args.dart_entrypoint_argv = nullptr;
    project_args.dart_entrypoint_argc = 0;
  }
  auto result =
      run ? FlutterEngineRun(FLUTTER_ENGINE_VERSION, &renderer_config_,
                             &project_args, &context_, &engine)
          : FlutterEngineInitialize(FLUTTER_ENGINE_VERSION, &renderer_config_,
                                    &project_args, &context_, &engine);
  if (result != kSuccess) {
    return {};
  }
  return UniqueEngine{engine};
}
#ifdef SHELL_ENABLE_METAL
// Populates the Metal renderer config from the Metal test context: device,
// command queue, and the drawable / present / external-texture callbacks.
// No-op for non-Metal contexts.
void EmbedderConfigBuilder::InitializeMetalRendererConfig() {
  if (context_.GetContextType() != EmbedderTestContextType::kMetalContext) {
    return;
  }
  metal_renderer_config_.struct_size = sizeof(metal_renderer_config_);
  EmbedderTestContextMetal& metal_context =
      reinterpret_cast<EmbedderTestContextMetal&>(context_);
  metal_renderer_config_.device =
      metal_context.GetTestMetalContext()->GetMetalDevice();
  metal_renderer_config_.present_command_queue =
      metal_context.GetTestMetalContext()->GetMetalCommandQueue();
  metal_renderer_config_.get_next_drawable_callback =
      [](void* user_data, const FlutterFrameInfo* frame_info) {
        return reinterpret_cast<EmbedderTestContextMetal*>(user_data)
            ->GetNextDrawable(frame_info);
      };
  metal_renderer_config_.present_drawable_callback =
      [](void* user_data, const FlutterMetalTexture* texture) -> bool {
    EmbedderTestContextMetal* metal_context =
        reinterpret_cast<EmbedderTestContextMetal*>(user_data);
    return metal_context->Present(texture->texture_id);
  };
  metal_renderer_config_.external_texture_frame_callback =
      [](void* user_data, int64_t texture_id, size_t width, size_t height,
         FlutterMetalExternalTexture* texture_out) -> bool {
    EmbedderTestContextMetal* metal_context =
        reinterpret_cast<EmbedderTestContextMetal*>(user_data);
    return metal_context->PopulateExternalTexture(texture_id, width, height,
                                                  texture_out);
  };
}
#endif  // SHELL_ENABLE_METAL
#ifdef SHELL_ENABLE_VULKAN
// Populates the Vulkan renderer config from the Vulkan test context:
// instance/device handles, queue info, proc-address resolution, and the
// acquire/present image callbacks. No-op for non-Vulkan contexts.
void EmbedderConfigBuilder::InitializeVulkanRendererConfig() {
  if (context_.GetContextType() != EmbedderTestContextType::kVulkanContext) {
    return;
  }
  vulkan_renderer_config_.struct_size = sizeof(FlutterVulkanRendererConfig);
  vulkan_renderer_config_.version =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->application_->GetAPIVersion();
  vulkan_renderer_config_.instance =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->application_->GetInstance();
  vulkan_renderer_config_.physical_device =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->device_->GetPhysicalDeviceHandle();
  vulkan_renderer_config_.device =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->device_->GetHandle();
  vulkan_renderer_config_.queue_family_index =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->device_->GetGraphicsQueueIndex();
  vulkan_renderer_config_.queue =
      static_cast<EmbedderTestContextVulkan&>(context_)
          .vulkan_context_->device_->GetQueueHandle();
  vulkan_renderer_config_.get_instance_proc_address_callback =
      EmbedderTestContextVulkan::InstanceProcAddr;
  vulkan_renderer_config_.get_next_image_callback =
      [](void* context,
         const FlutterFrameInfo* frame_info) -> FlutterVulkanImage {
    VkImage image =
        reinterpret_cast<EmbedderTestContextVulkan*>(context)->GetNextImage(
            {static_cast<int>(frame_info->size.width),
             static_cast<int>(frame_info->size.height)});
    return {
        .struct_size = sizeof(FlutterVulkanImage),
        .image = reinterpret_cast<uint64_t>(image),
        // NOTE(review): format is hard-coded; assumes the test swapchain
        // always allocates RGBA8 unorm images — confirm.
        .format = VK_FORMAT_R8G8B8A8_UNORM,
    };
  };
  vulkan_renderer_config_.present_image_callback =
      [](void* context, const FlutterVulkanImage* image) -> bool {
    return reinterpret_cast<EmbedderTestContextVulkan*>(context)->PresentImage(
        reinterpret_cast<VkImage>(image->image));
  };
}
#endif
} // namespace testing
} // namespace flutter
| engine/shell/platform/embedder/tests/embedder_config_builder.cc/0 | {
"file_path": "engine/shell/platform/embedder/tests/embedder_config_builder.cc",
"repo_id": "engine",
"token_count": 7581
} | 373 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/embedder/tests/embedder_test_compositor_software.h"
#include "flutter/fml/logging.h"
#include "flutter/shell/platform/embedder/tests/embedder_assertions.h"
#include "third_party/skia/include/core/SkSurface.h"
namespace flutter {
namespace testing {
// A software compositor never has a GrDirectContext, hence nullptr.
EmbedderTestCompositorSoftware::EmbedderTestCompositorSoftware(
    SkISize surface_size)
    : EmbedderTestCompositor(surface_size, nullptr) {}

EmbedderTestCompositorSoftware::~EmbedderTestCompositorSoftware() = default;
// Flattens the given engine layers into a single off-screen raster image and
// stores it as last_composition_, invoking next_scene_callback_ (if set)
// with a raster snapshot. Returns false on any failure.
//
// NOTE(review): "Offscren" is a typo for "Offscreen", but the name almost
// certainly matches the declaration in the header/base class — renaming
// here alone would break the build; fix across declaration and callers.
bool EmbedderTestCompositorSoftware::UpdateOffscrenComposition(
    const FlutterLayer** layers,
    size_t layers_count) {
  // Drop the previous composition up-front so failures don't leave a stale
  // image behind.
  last_composition_ = nullptr;
  const auto image_info = SkImageInfo::MakeN32Premul(surface_size_);
  auto surface = SkSurfaces::Raster(image_info);
  if (!surface) {
    FML_LOG(ERROR) << "Could not update the off-screen composition.";
    return false;
  }
  auto canvas = surface->getCanvas();
  // This has to be transparent because we are going to be compositing this
  // sub-hierarchy onto the on-screen surface.
  canvas->clear(SK_ColorTRANSPARENT);
  for (size_t i = 0; i < layers_count; ++i) {
    const auto* layer = layers[i];
    sk_sp<SkImage> platform_rendered_contents;
    sk_sp<SkImage> layer_image;
    SkIPoint canvas_offset = SkIPoint::Make(0, 0);
    switch (layer->type) {
      case kFlutterLayerContentTypeBackingStore:
        // Software backing stores carry their SkSurface in user_data.
        layer_image =
            reinterpret_cast<SkSurface*>(layer->backing_store->user_data)
                ->makeImageSnapshot();
        break;
      case kFlutterLayerContentTypePlatformView:
        // Tests may register a renderer that supplies platform view pixels;
        // a null image simply means "nothing to draw" for this view.
        layer_image = platform_view_renderer_callback_
                          ? platform_view_renderer_callback_(*layer, nullptr)
                          : nullptr;
        canvas_offset = SkIPoint::Make(layer->offset.x, layer->offset.y);
        break;
    };
    // If the layer is not a platform view but the engine did not specify an
    // image for the backing store, it is an error.
    if (!layer_image && layer->type != kFlutterLayerContentTypePlatformView) {
      FML_LOG(ERROR) << "Could not snapshot layer in test compositor: "
                     << *layer;
      return false;
    }
    // The test could have just specified no contents to be rendered in place of
    // a platform view. This is not an error.
    if (layer_image) {
      // The image rendered by Flutter already has the correct offset and
      // transformation applied. The layers offset is meant for the platform.
      canvas->drawImage(layer_image.get(), canvas_offset.x(),
                        canvas_offset.y());
    }
  }
  last_composition_ = surface->makeImageSnapshot();
  if (!last_composition_) {
    FML_LOG(ERROR) << "Could not update the contents of the sub-composition.";
    return false;
  }
  if (next_scene_callback_) {
    auto last_composition_snapshot = last_composition_->makeRasterImage();
    FML_CHECK(last_composition_snapshot);
    // The callback is one-shot: clear it before invoking so a re-entrant
    // registration from inside the callback is preserved.
    auto callback = next_scene_callback_;
    next_scene_callback_ = nullptr;
    callback(std::move(last_composition_snapshot));
  }
  return true;
}
} // namespace testing
} // namespace flutter
| engine/shell/platform/embedder/tests/embedder_test_compositor_software.cc/0 | {
"file_path": "engine/shell/platform/embedder/tests/embedder_test_compositor_software.cc",
"repo_id": "engine",
"token_count": 1205
} | 374 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define FML_USED_ON_EMBEDDER
#include <limits>
#include <utility>
#include "flutter/shell/platform/embedder/tests/embedder_test_backingstore_producer.h"
#include "flutter/shell/platform/embedder/tests/embedder_unittests_util.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/encode/SkPngEncoder.h"
#include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h"
namespace flutter {
namespace testing {
// Creates a surface matching the layer's size: GPU-backed when a
// GrDirectContext is supplied, CPU raster otherwise. Crashes (FML_CHECK) if
// surface creation fails, since tests cannot proceed without one.
sk_sp<SkSurface> CreateRenderSurface(const FlutterLayer& layer,
                                     GrDirectContext* context) {
  const auto image_info =
      SkImageInfo::MakeN32Premul(layer.size.width, layer.size.height);
  auto surface = context ? SkSurfaces::RenderTarget(
                               context,                   // context
                               skgpu::Budgeted::kNo,      // budgeted
                               image_info,                // image info
                               1,                         // sample count
                               kTopLeft_GrSurfaceOrigin,  // surface origin
                               nullptr,                   // surface properties
                               false                      // mipmaps
                               )
                         : SkSurfaces::Raster(image_info);
  FML_CHECK(surface != nullptr);
  return surface;
}
// Normalizes the color-space, color-type and alpha-type for comparison.
static sk_sp<SkData> NormalizeImage(const sk_sp<SkImage>& image) {
// To avoid clipping, convert to a very wide gamut, and a high bit depth.
sk_sp<SkColorSpace> norm_colorspace = SkColorSpace::MakeRGB(
SkNamedTransferFn::kRec2020, SkNamedGamut::kRec2020);
SkImageInfo norm_image_info =
SkImageInfo::Make(image->width(), image->height(),
SkColorType::kR16G16B16A16_unorm_SkColorType,
SkAlphaType::kUnpremul_SkAlphaType, norm_colorspace);
size_t row_bytes = norm_image_info.minRowBytes();
size_t size = norm_image_info.computeByteSize(row_bytes);
sk_sp<SkData> data = SkData::MakeUninitialized(size);
if (!data) {
FML_CHECK(false) << "Unable to allocate data.";
}
bool success = image->readPixels(norm_image_info, data->writable_data(),
row_bytes, 0, 0);
if (!success) {
FML_CHECK(false) << "Unable to read pixels.";
}
return data;
}
bool RasterImagesAreSame(const sk_sp<SkImage>& a, const sk_sp<SkImage>& b) {
if (!a || !b) {
return false;
}
FML_CHECK(!a->isTextureBacked());
FML_CHECK(!b->isTextureBacked());
sk_sp<SkData> normalized_a = NormalizeImage(a);
sk_sp<SkData> normalized_b = NormalizeImage(b);
return normalized_a->equals(normalized_b.get());
}
// Returns the fixture file name to use for the given backend. Vulkan golden
// files carry a "vk_" prefix; every other backend shares the plain name.
std::string FixtureNameForBackend(EmbedderTestContextType backend,
                                  const std::string& name) {
  if (backend == EmbedderTestContextType::kVulkanContext) {
    return "vk_" + name;
  }
  return name;
}
// Maps a test context backend (and, for OpenGL, a framebuffer-vs-texture
// choice) to the render target type the backing store producer should use.
EmbedderTestBackingStoreProducer::RenderTargetType GetRenderTargetFromBackend(
    EmbedderTestContextType backend,
    bool opengl_framebuffer) {
  switch (backend) {
    case EmbedderTestContextType::kVulkanContext:
      return EmbedderTestBackingStoreProducer::RenderTargetType::kVulkanImage;
    case EmbedderTestContextType::kOpenGLContext:
      if (opengl_framebuffer) {
        return EmbedderTestBackingStoreProducer::RenderTargetType::
            kOpenGLFramebuffer;
      }
      return EmbedderTestBackingStoreProducer::RenderTargetType::kOpenGLTexture;
    case EmbedderTestContextType::kMetalContext:
      return EmbedderTestBackingStoreProducer::RenderTargetType::kMetalTexture;
    case EmbedderTestContextType::kSoftwareContext:
      return EmbedderTestBackingStoreProducer::RenderTargetType::
          kSoftwareBuffer;
  }
  // All enumerators are handled above, but a C++ enum may still hold an
  // out-of-range value; flowing off the end of a value-returning function
  // is undefined behavior (and trips -Wreturn-type), so fail loudly instead.
  FML_UNREACHABLE();
}
// Fills in the backing store's type tag (and, for OpenGL, its target kind)
// according to the backend under test.
void ConfigureBackingStore(FlutterBackingStore& backing_store,
                           EmbedderTestContextType backend,
                           bool opengl_framebuffer) {
  switch (backend) {
    case EmbedderTestContextType::kVulkanContext:
      backing_store.type = kFlutterBackingStoreTypeVulkan;
      break;
    case EmbedderTestContextType::kOpenGLContext:
      // Both GL variants share the top-level type and differ only in the
      // GL target: framebuffer vs. texture.
      backing_store.type = kFlutterBackingStoreTypeOpenGL;
      backing_store.open_gl.type = opengl_framebuffer
                                       ? kFlutterOpenGLTargetTypeFramebuffer
                                       : kFlutterOpenGLTargetTypeTexture;
      break;
    case EmbedderTestContextType::kMetalContext:
      backing_store.type = kFlutterBackingStoreTypeMetal;
      break;
    case EmbedderTestContextType::kSoftwareContext:
      backing_store.type = kFlutterBackingStoreTypeSoftware;
      break;
  }
}
// PNG-encodes the image and writes it atomically under `name` inside
// `directory`. Returns false on a null image, encode failure, or I/O error.
bool WriteImageToDisk(const fml::UniqueFD& directory,
                      const std::string& name,
                      const sk_sp<SkImage>& image) {
  if (!image) {
    return false;
  }
  auto data = SkPngEncoder::Encode(nullptr, image.get(), {});
  if (!data) {
    return false;
  }
  // NonOwnedMapping: `data` stays alive for the duration of the write.
  fml::NonOwnedMapping mapping(static_cast<const uint8_t*>(data->data()),
                               data->size());
  return WriteAtomically(directory, name.c_str(), mapping);
}
// Compares the scene image against a golden PNG fixture. The scene image is
// cropped to the fixture's dimensions before comparison. On mismatch, both
// the actual and expected images are dumped into the fixtures directory so
// the test author can diff them.
bool ImageMatchesFixture(const std::string& fixture_file_name,
                         const sk_sp<SkImage>& scene_image) {
  fml::FileMapping fixture_image_mapping(OpenFixture(fixture_file_name));
  FML_CHECK(fixture_image_mapping.GetSize() != 0u)
      << "Could not find fixture: " << fixture_file_name;
  auto encoded_image = SkData::MakeWithoutCopy(
      fixture_image_mapping.GetMapping(), fixture_image_mapping.GetSize());
  auto fixture_image =
      SkImages::DeferredFromEncodedData(std::move(encoded_image))
          ->makeRasterImage();
  FML_CHECK(fixture_image) << "Could not create image from fixture: "
                           << fixture_file_name;
  FML_CHECK(scene_image) << "Invalid scene image.";
  // Crop the scene to the fixture's bounds; the scene may be larger.
  auto scene_image_subset = scene_image->makeSubset(
      nullptr,
      SkIRect::MakeWH(fixture_image->width(), fixture_image->height()));
  FML_CHECK(scene_image_subset)
      << "Could not create image subset for fixture comparison: "
      << scene_image_subset;
  const auto images_are_same =
      RasterImagesAreSame(scene_image_subset, fixture_image);
  // If the images are not the same, this predicate is going to indicate test
  // failure. Dump both the actual image and the expectation to disk to the
  // test author can figure out what went wrong.
  if (!images_are_same) {
    const auto fixtures_path = GetFixturesPath();
    const auto actual_file_name = "actual_" + fixture_file_name;
    const auto expect_file_name = "expectation_" + fixture_file_name;
    auto fixtures_fd = OpenFixturesDirectory();
    FML_CHECK(
        WriteImageToDisk(fixtures_fd, actual_file_name, scene_image_subset))
        << "Could not write file to disk: " << actual_file_name;
    FML_CHECK(WriteImageToDisk(fixtures_fd, expect_file_name, fixture_image))
        << "Could not write file to disk: " << expect_file_name;
    FML_LOG(ERROR) << "Image did not match expectation." << std::endl
                   << "Expected:"
                   << fml::paths::JoinPaths({fixtures_path, expect_file_name})
                   << std::endl
                   << "Got:"
                   << fml::paths::JoinPaths({fixtures_path, actual_file_name})
                   << std::endl;
  }
  return images_are_same;
}

// Convenience overload: blocks on the future, then compares.
bool ImageMatchesFixture(const std::string& fixture_file_name,
                         std::future<sk_sp<SkImage>>& scene_image) {
  return ImageMatchesFixture(fixture_file_name, scene_image.get());
}
// Compares the surface's raw pixel data against an expected byte vector,
// logging both buffers as hex on mismatch.
//
// NOTE(review): the size check compares rowBytes() (one row) against the
// full expected vector — this assumes the surface under test is one pixel
// row tall, or that expectations cover exactly one row; confirm callers.
bool SurfacePixelDataMatchesBytes(SkSurface* surface,
                                  const std::vector<uint8_t>& bytes) {
  SkPixmap pixmap;
  auto ok = surface->peekPixels(&pixmap);
  if (!ok) {
    return false;
  }
  auto matches = (pixmap.rowBytes() == bytes.size()) &&
                 (memcmp(bytes.data(), pixmap.addr(), bytes.size()) == 0);
  if (!matches) {
    FML_LOG(ERROR) << "SkImage pixel data didn't match bytes.";
    {
      // Dump the actual pixel bytes as hex for diffing.
      const uint8_t* addr = static_cast<const uint8_t*>(pixmap.addr());
      std::stringstream stream;
      for (size_t i = 0; i < pixmap.computeByteSize(); ++i) {
        stream << "0x" << std::setfill('0') << std::setw(2) << std::uppercase
               << std::hex << static_cast<int>(addr[i]);
        if (i != pixmap.computeByteSize() - 1) {
          stream << ", ";
        }
      }
      FML_LOG(ERROR) << "  Actual:   " << stream.str();
    }
    {
      // Dump the expected bytes in the same format.
      std::stringstream stream;
      for (auto b = bytes.begin(); b != bytes.end(); ++b) {
        stream << "0x" << std::setfill('0') << std::setw(2) << std::uppercase
               << std::hex << static_cast<int>(*b);
        if (b != bytes.end() - 1) {
          stream << ", ";
        }
      }
      FML_LOG(ERROR) << "  Expected: " << stream.str();
    }
  }
  return matches;
}

// Convenience overload: blocks on the future, then compares.
bool SurfacePixelDataMatchesBytes(std::future<SkSurface*>& surface_future,
                                  const std::vector<uint8_t>& bytes) {
  return SurfacePixelDataMatchesBytes(surface_future.get(), bytes);
}
// Invokes `handler` for each mutation in the array whose type matches
// `type`, in array order. A null mutations array is a no-op.
void FilterMutationsByType(
    const FlutterPlatformViewMutation** mutations,
    size_t count,
    FlutterPlatformViewMutationType type,
    const std::function<void(const FlutterPlatformViewMutation& mutation)>&
        handler) {
  if (mutations == nullptr) {
    return;
  }
  for (size_t i = 0; i < count; ++i) {
    const FlutterPlatformViewMutation* mutation = mutations[i];
    if (mutation->type != type) {
      continue;
    }
    handler(*mutation);
  }
}

// Convenience overload operating on a platform view's mutation list.
void FilterMutationsByType(
    const FlutterPlatformView* view,
    FlutterPlatformViewMutationType type,
    const std::function<void(const FlutterPlatformViewMutation& mutation)>&
        handler) {
  return FilterMutationsByType(view->mutations, view->mutations_count, type,
                               handler);
}

// Accumulates all transformation mutations (in array order) into a single
// matrix. `collected` starts as identity (SkMatrix's default).
SkMatrix GetTotalMutationTransformationMatrix(
    const FlutterPlatformViewMutation** mutations,
    size_t count) {
  SkMatrix collected;
  FilterMutationsByType(
      mutations, count, kFlutterPlatformViewMutationTypeTransformation,
      [&](const auto& mutation) {
        collected.preConcat(SkMatrixMake(mutation.transformation));
      });
  return collected;
}

// Convenience overload operating on a platform view's mutation list.
SkMatrix GetTotalMutationTransformationMatrix(const FlutterPlatformView* view) {
  return GetTotalMutationTransformationMatrix(view->mutations,
                                              view->mutations_count);
}
} // namespace testing
} // namespace flutter
| engine/shell/platform/embedder/tests/embedder_unittests_util.cc/0 | {
"file_path": "engine/shell/platform/embedder/tests/embedder_unittests_util.cc",
"repo_id": "engine",
"token_count": 4633
} | 375 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of zircon;
// ignore_for_file: native_function_body_in_non_sdk_code
// ignore_for_file: public_member_api_docs
/// A wrapper around a Zircon kernel handle.
///
/// Instances are created by native code; Dart code can only obtain an
/// explicitly-invalid handle via [Handle.invalid]. All operations delegate
/// to native implementations via `vm:external-name` bindings.
@pragma('vm:entry-point')
base class Handle extends NativeFieldWrapperClass1 {
  // No public constructor - this can only be created from native code.
  @pragma('vm:entry-point')
  Handle._();

  // Create an invalid handle object.
  factory Handle.invalid() {
    return _createInvalid();
  }

  @pragma('vm:external-name', 'Handle_CreateInvalid')
  external static Handle _createInvalid();

  /// The raw handle value.
  @pragma('vm:external-name', 'Handle_handle')
  external int get handle;

  /// The kernel object id (koid) this handle refers to.
  @pragma('vm:external-name', 'Handle_koid')
  external int get koid;

  @override
  String toString() => 'Handle($handle)';

  // Equality and hashCode are both based on the raw handle value, keeping
  // the == / hashCode contract consistent.
  @override
  bool operator ==(Object other) {
    return other is Handle
        && other.handle == handle;
  }

  @override
  int get hashCode => handle.hashCode;

  // Common handle operations.
  @pragma('vm:external-name', 'Handle_is_valid')
  external bool get isValid;
  @pragma('vm:external-name', 'Handle_Close')
  external int close();
  @pragma('vm:external-name', 'Handle_AsyncWait')
  external HandleWaiter asyncWait(int signals, AsyncWaitCallback callback);
  @pragma('vm:external-name', 'Handle_Duplicate')
  external Handle duplicate(int rights);
  @pragma('vm:external-name', 'Handle_Replace')
  external Handle replace(int rights);
}
/// Captures a two-argument wait-completion callback so native code can later
/// retrieve a zero-argument closure (via [_closure]) that invokes it.
@pragma('vm:entry-point')
class _OnWaitCompleteClosure {
  // No public constructor - this can only be created from native code.
  @pragma('vm:entry-point')
  _OnWaitCompleteClosure(this._callback, this._arg1, this._arg2);

  // The user callback and the two arguments captured for it.
  Function _callback;
  Object _arg1;
  Object _arg2;

  // Read by native code to obtain a no-arg closure wrapping the callback.
  @pragma('vm:entry-point')
  Function get _closure => () => _callback(_arg1, _arg2);
}
| engine/shell/platform/fuchsia/dart-pkg/zircon/lib/src/handle.dart/0 | {
"file_path": "engine/shell/platform/fuchsia/dart-pkg/zircon/lib/src/handle.dart",
"repo_id": "engine",
"token_count": 613
} | 376 |
// AUTO GENERATED FILE, DO NOT EDIT.
//
// Generated by `package:ffigen`.
import 'dart:ffi' as ffi;
/// Bindings for `dart:zircon_ffi`.
class ZirconFFIBindings {
/// Holds the symbol lookup function.
final ffi.Pointer<T> Function<T extends ffi.NativeType>(String symbolName)
_lookup;
/// The symbols are looked up in [dynamicLibrary].
ZirconFFIBindings(ffi.DynamicLibrary dynamicLibrary)
: _lookup = dynamicLibrary.lookup;
/// The symbols are looked up with [lookup].
ZirconFFIBindings.fromLookup(
ffi.Pointer<T> Function<T extends ffi.NativeType>(String symbolName)
lookup)
: _lookup = lookup;
ffi.Pointer<zircon_dart_byte_array_t> zircon_dart_byte_array_create(
int size,
) {
return _zircon_dart_byte_array_create(
size,
);
}
late final _zircon_dart_byte_array_create_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_byte_array_create>>(
'zircon_dart_byte_array_create');
late final _dart_zircon_dart_byte_array_create
_zircon_dart_byte_array_create = _zircon_dart_byte_array_create_ptr
.asFunction<_dart_zircon_dart_byte_array_create>();
void zircon_dart_byte_array_set_value(
ffi.Pointer<zircon_dart_byte_array_t> arr,
int index,
int value,
) {
return _zircon_dart_byte_array_set_value(
arr,
index,
value,
);
}
late final _zircon_dart_byte_array_set_value_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_byte_array_set_value>>(
'zircon_dart_byte_array_set_value');
late final _dart_zircon_dart_byte_array_set_value
_zircon_dart_byte_array_set_value = _zircon_dart_byte_array_set_value_ptr
.asFunction<_dart_zircon_dart_byte_array_set_value>();
void zircon_dart_byte_array_free(
ffi.Pointer<zircon_dart_byte_array_t> arr,
) {
return _zircon_dart_byte_array_free(
arr,
);
}
late final _zircon_dart_byte_array_free_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_byte_array_free>>(
'zircon_dart_byte_array_free');
late final _dart_zircon_dart_byte_array_free _zircon_dart_byte_array_free =
_zircon_dart_byte_array_free_ptr
.asFunction<_dart_zircon_dart_byte_array_free>();
ffi.Pointer<zircon_dart_handle_list_t> zircon_dart_handle_list_create() {
return _zircon_dart_handle_list_create();
}
late final _zircon_dart_handle_list_create_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_list_create>>(
'zircon_dart_handle_list_create');
late final _dart_zircon_dart_handle_list_create
_zircon_dart_handle_list_create = _zircon_dart_handle_list_create_ptr
.asFunction<_dart_zircon_dart_handle_list_create>();
void zircon_dart_handle_list_append(
ffi.Pointer<zircon_dart_handle_list_t> list,
ffi.Pointer<zircon_dart_handle_t> handle,
) {
return _zircon_dart_handle_list_append(
list,
handle,
);
}
late final _zircon_dart_handle_list_append_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_list_append>>(
'zircon_dart_handle_list_append');
late final _dart_zircon_dart_handle_list_append
_zircon_dart_handle_list_append = _zircon_dart_handle_list_append_ptr
.asFunction<_dart_zircon_dart_handle_list_append>();
void zircon_dart_handle_list_free(
ffi.Pointer<zircon_dart_handle_list_t> list,
) {
return _zircon_dart_handle_list_free(
list,
);
}
late final _zircon_dart_handle_list_free_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_list_free>>(
'zircon_dart_handle_list_free');
late final _dart_zircon_dart_handle_list_free _zircon_dart_handle_list_free =
_zircon_dart_handle_list_free_ptr
.asFunction<_dart_zircon_dart_handle_list_free>();
int zircon_dart_handle_is_valid(
ffi.Pointer<zircon_dart_handle_t> handle,
) {
return _zircon_dart_handle_is_valid(
handle,
);
}
late final _zircon_dart_handle_is_valid_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_is_valid>>(
'zircon_dart_handle_is_valid');
late final _dart_zircon_dart_handle_is_valid _zircon_dart_handle_is_valid =
_zircon_dart_handle_is_valid_ptr
.asFunction<_dart_zircon_dart_handle_is_valid>();
int zircon_dart_handle_close(
ffi.Pointer<zircon_dart_handle_t> handle,
) {
return _zircon_dart_handle_close(
handle,
);
}
late final _zircon_dart_handle_close_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_close>>(
'zircon_dart_handle_close');
late final _dart_zircon_dart_handle_close _zircon_dart_handle_close =
_zircon_dart_handle_close_ptr
.asFunction<_dart_zircon_dart_handle_close>();
void zircon_dart_handle_free(
ffi.Pointer<zircon_dart_handle_t> handle,
) {
return _zircon_dart_handle_free(
handle,
);
}
late final _zircon_dart_handle_free_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_free>>(
'zircon_dart_handle_free');
late final _dart_zircon_dart_handle_free _zircon_dart_handle_free =
_zircon_dart_handle_free_ptr.asFunction<_dart_zircon_dart_handle_free>();
int zircon_dart_handle_pair_attach_finalizer(
Object object,
ffi.Pointer<ffi.Void> pointer,
int external_allocation_size,
) {
return _zircon_dart_handle_pair_attach_finalizer(
object,
pointer,
external_allocation_size,
);
}
late final _zircon_dart_handle_pair_attach_finalizer_ptr =
_lookup<ffi.NativeFunction<_c_zircon_dart_handle_pair_attach_finalizer>>(
'zircon_dart_handle_pair_attach_finalizer');
late final _dart_zircon_dart_handle_pair_attach_finalizer
_zircon_dart_handle_pair_attach_finalizer =
_zircon_dart_handle_pair_attach_finalizer_ptr
.asFunction<_dart_zircon_dart_handle_pair_attach_finalizer>();
/// Attaches a native finalizer for a single handle to the Dart [object];
/// returns the native call's integer status.
///
/// [pointer] is the native data handed to the finalizer and
/// [external_allocation_size] is reported to the VM's GC accounting.
int zircon_dart_handle_attach_finalizer(
  Object object,
  ffi.Pointer<ffi.Void> pointer,
  int external_allocation_size,
) {
  return _zircon_dart_handle_attach_finalizer(
    object,
    pointer,
    external_allocation_size,
  );
}
/// Lazily resolved native symbol for `zircon_dart_handle_attach_finalizer`.
late final _zircon_dart_handle_attach_finalizer_ptr =
    _lookup<ffi.NativeFunction<_c_zircon_dart_handle_attach_finalizer>>(
        'zircon_dart_handle_attach_finalizer');
/// Typed Dart function bound to [_zircon_dart_handle_attach_finalizer_ptr].
late final _dart_zircon_dart_handle_attach_finalizer
    _zircon_dart_handle_attach_finalizer =
    _zircon_dart_handle_attach_finalizer_ptr
        .asFunction<_dart_zircon_dart_handle_attach_finalizer>();
/// Creates a Zircon channel with the given [options]; returns a pointer to
/// the resulting handle pair.
ffi.Pointer<zircon_dart_handle_pair_t> zircon_dart_channel_create(
  int options,
) {
  return _zircon_dart_channel_create(
    options,
  );
}
/// Lazily resolved native symbol for `zircon_dart_channel_create`.
late final _zircon_dart_channel_create_ptr =
    _lookup<ffi.NativeFunction<_c_zircon_dart_channel_create>>(
        'zircon_dart_channel_create');
/// Typed Dart function bound to [_zircon_dart_channel_create_ptr].
late final _dart_zircon_dart_channel_create _zircon_dart_channel_create =
    _zircon_dart_channel_create_ptr
        .asFunction<_dart_zircon_dart_channel_create>();
/// Writes [bytes] and [handles] to the channel referred to by [handle];
/// returns the native call's integer status.
int zircon_dart_channel_write(
  ffi.Pointer<zircon_dart_handle_t> handle,
  ffi.Pointer<zircon_dart_byte_array_t> bytes,
  ffi.Pointer<zircon_dart_handle_list_t> handles,
) {
  return _zircon_dart_channel_write(
    handle,
    bytes,
    handles,
  );
}
/// Lazily resolved native symbol for `zircon_dart_channel_write`.
late final _zircon_dart_channel_write_ptr =
    _lookup<ffi.NativeFunction<_c_zircon_dart_channel_write>>(
        'zircon_dart_channel_write');
/// Typed Dart function bound to [_zircon_dart_channel_write_ptr].
late final _dart_zircon_dart_channel_write _zircon_dart_channel_write =
    _zircon_dart_channel_write_ptr
        .asFunction<_dart_zircon_dart_channel_write>();
/// Returns the native monotonic clock reading as an integer.
int zircon_dart_clock_get_monotonic() {
  return _zircon_dart_clock_get_monotonic();
}
/// Lazily resolved native symbol for `zircon_dart_clock_get_monotonic`.
late final _zircon_dart_clock_get_monotonic_ptr =
    _lookup<ffi.NativeFunction<_c_zircon_dart_clock_get_monotonic>>(
        'zircon_dart_clock_get_monotonic');
/// Typed Dart function bound to [_zircon_dart_clock_get_monotonic_ptr].
late final _dart_zircon_dart_clock_get_monotonic
    _zircon_dart_clock_get_monotonic = _zircon_dart_clock_get_monotonic_ptr
        .asFunction<_dart_zircon_dart_clock_get_monotonic>();
/// Initializes the native side's Dart dynamic-linking API with
/// [initialize_api_dl_data]; returns the native call's integer status.
int zircon_dart_dl_initialize(
  ffi.Pointer<ffi.Void> initialize_api_dl_data,
) {
  return _zircon_dart_dl_initialize(
    initialize_api_dl_data,
  );
}
/// Lazily resolved native symbol for `zircon_dart_dl_initialize`.
late final _zircon_dart_dl_initialize_ptr =
    _lookup<ffi.NativeFunction<_c_zircon_dart_dl_initialize>>(
        'zircon_dart_dl_initialize');
/// Typed Dart function bound to [_zircon_dart_dl_initialize_ptr].
late final _dart_zircon_dart_dl_initialize _zircon_dart_dl_initialize =
    _zircon_dart_dl_initialize_ptr
        .asFunction<_dart_zircon_dart_dl_initialize>();
}
/// FFI mirror of the C `zircon_dart_byte_array_t` struct: a byte buffer and
/// its length.
final class zircon_dart_byte_array_t extends ffi.Struct {
  /// Pointer to the first byte of the buffer.
  external ffi.Pointer<ffi.Uint8> data;
  /// Number of bytes in [data].
  @ffi.Uint32()
  external int length;
}
/// FFI mirror of the C `zircon_dart_handle_t` struct wrapping a raw Zircon
/// handle value.
final class zircon_dart_handle_t extends ffi.Struct {
  /// The raw 32-bit handle value.
  @ffi.Uint32()
  external int handle;
}
/// FFI mirror of the C `zircon_dart_handle_pair_t` struct: the two ends of a
/// handle pair (e.g. a channel's endpoints).
final class zircon_dart_handle_pair_t extends ffi.Struct {
  external ffi.Pointer<zircon_dart_handle_t> left;
  external ffi.Pointer<zircon_dart_handle_t> right;
}
/// FFI mirror of the C `zircon_dart_handle_list_t` struct: an opaque list of
/// handles plus its element count.
final class zircon_dart_handle_list_t extends ffi.Struct {
  /// Opaque pointer to the list's native storage.
  external ffi.Pointer<ffi.Void> data;
  /// Number of handles currently in the list.
  @ffi.Uint32()
  external int size;
}
/// Opaque placeholder for the VM's native `Dart_Handle` type.
final class _Dart_Handle extends ffi.Opaque {}
// Signature typedefs for every bound symbol. Each symbol has a native
// (`_c_*`) signature used for lookup and a Dart (`_dart_*`) signature used
// for `asFunction`.
//
// Byte-array helpers.
typedef _c_zircon_dart_byte_array_create = ffi.Pointer<zircon_dart_byte_array_t>
    Function(
  ffi.Uint32 size,
);
typedef _dart_zircon_dart_byte_array_create
    = ffi.Pointer<zircon_dart_byte_array_t> Function(
  int size,
);
typedef _c_zircon_dart_byte_array_set_value = ffi.Void Function(
  ffi.Pointer<zircon_dart_byte_array_t> arr,
  ffi.Uint32 index,
  ffi.Uint8 value,
);
typedef _dart_zircon_dart_byte_array_set_value = void Function(
  ffi.Pointer<zircon_dart_byte_array_t> arr,
  int index,
  int value,
);
typedef _c_zircon_dart_byte_array_free = ffi.Void Function(
  ffi.Pointer<zircon_dart_byte_array_t> arr,
);
typedef _dart_zircon_dart_byte_array_free = void Function(
  ffi.Pointer<zircon_dart_byte_array_t> arr,
);
// Handle-list helpers.
typedef _c_zircon_dart_handle_list_create
    = ffi.Pointer<zircon_dart_handle_list_t> Function();
typedef _dart_zircon_dart_handle_list_create
    = ffi.Pointer<zircon_dart_handle_list_t> Function();
typedef _c_zircon_dart_handle_list_append = ffi.Void Function(
  ffi.Pointer<zircon_dart_handle_list_t> list,
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _dart_zircon_dart_handle_list_append = void Function(
  ffi.Pointer<zircon_dart_handle_list_t> list,
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _c_zircon_dart_handle_list_free = ffi.Void Function(
  ffi.Pointer<zircon_dart_handle_list_t> list,
);
typedef _dart_zircon_dart_handle_list_free = void Function(
  ffi.Pointer<zircon_dart_handle_list_t> list,
);
// Single-handle helpers.
typedef _c_zircon_dart_handle_is_valid = ffi.Int32 Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _dart_zircon_dart_handle_is_valid = int Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _c_zircon_dart_handle_close = ffi.Int32 Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _dart_zircon_dart_handle_close = int Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _c_zircon_dart_handle_free = ffi.Void Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
typedef _dart_zircon_dart_handle_free = void Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
);
// Finalizer attachment; note `ffi.Handle` maps to a Dart `Object`.
typedef _c_zircon_dart_handle_pair_attach_finalizer = ffi.Int32 Function(
  ffi.Handle object,
  ffi.Pointer<ffi.Void> pointer,
  ffi.IntPtr external_allocation_size,
);
typedef _dart_zircon_dart_handle_pair_attach_finalizer = int Function(
  Object object,
  ffi.Pointer<ffi.Void> pointer,
  int external_allocation_size,
);
typedef _c_zircon_dart_handle_attach_finalizer = ffi.Int32 Function(
  ffi.Handle object,
  ffi.Pointer<ffi.Void> pointer,
  ffi.IntPtr external_allocation_size,
);
typedef _dart_zircon_dart_handle_attach_finalizer = int Function(
  Object object,
  ffi.Pointer<ffi.Void> pointer,
  int external_allocation_size,
);
// Channel helpers.
typedef _c_zircon_dart_channel_create = ffi.Pointer<zircon_dart_handle_pair_t>
    Function(
  ffi.Uint32 options,
);
typedef _dart_zircon_dart_channel_create
    = ffi.Pointer<zircon_dart_handle_pair_t> Function(
  int options,
);
typedef _c_zircon_dart_channel_write = ffi.Int32 Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
  ffi.Pointer<zircon_dart_byte_array_t> bytes,
  ffi.Pointer<zircon_dart_handle_list_t> handles,
);
typedef _dart_zircon_dart_channel_write = int Function(
  ffi.Pointer<zircon_dart_handle_t> handle,
  ffi.Pointer<zircon_dart_byte_array_t> bytes,
  ffi.Pointer<zircon_dart_handle_list_t> handles,
);
// Clock and dynamic-linking helpers.
typedef _c_zircon_dart_clock_get_monotonic = ffi.Uint64 Function();
typedef _dart_zircon_dart_clock_get_monotonic = int Function();
typedef _c_zircon_dart_dl_initialize = ffi.Int32 Function(
  ffi.Pointer<ffi.Void> initialize_api_dl_data,
);
typedef _dart_zircon_dart_dl_initialize = int Function(
  ffi.Pointer<ffi.Void> initialize_api_dl_data,
);
| engine/shell/platform/fuchsia/dart-pkg/zircon_ffi/lib/zircon_ffi.dart/0 | {
"file_path": "engine/shell/platform/fuchsia/dart-pkg/zircon_ffi/lib/zircon_ffi.dart",
"repo_id": "engine",
"token_count": 5953
} | 377 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/compiled_action.gni")
import("//flutter/common/fuchsia_config.gni")
import("//flutter/tools/fuchsia/dart_kernel.gni")
import("$dart_src/build/dart/dart_action.gni")
# Non-product AOT kernel for the shim entrypoint consumed by gen_snapshot in
# the create_aot_snapshot template below.
dart_kernel("shim_kernel") {
  main_dart = "shim.dart"
  kernel_platform_files = "../kernel:kernel_platform_files"
  product = false
  aot = true
}
# Product-mode variant of :shim_kernel; identical except for `product`.
dart_kernel("shim_product_kernel") {
  main_dart = "shim.dart"
  kernel_platform_files = "../kernel:kernel_platform_files"
  product = true
  aot = true
}
# Defines targets that build an AOT VM snapshot:
#
#   ${target_name}_assembly - runs gen_snapshot over the shim kernel to emit
#       the snapshot assembly file.
#   ${target_name} - a source_set wrapping the generated assembly plus
#       snapshot.h.
#
# Parameters:
#   product (required) - selects the product gen_snapshot tool and the
#       "_product" kernel/output suffix.
template("create_aot_snapshot") {
  assert(defined(invoker.product), "The parameter 'product' must be defined")
  product_suffix = ""
  if (invoker.product) {
    product_suffix = "_product"
  }
  compiled_action("${target_name}_assembly") {
    snapshot_assembly = "$target_gen_dir/aot${product_suffix}_vm_snapshot.S"
    # gen_snapshot only needs this to go through the motions of setting up an isolate.
    shim_target = ":shim${product_suffix}_kernel($host_toolchain)"
    shim_kernel = get_label_info(shim_target, "target_gen_dir") +
                  "/shim${product_suffix}_kernel.dill"
    inputs = [ shim_kernel ]
    outputs = [ snapshot_assembly ]
    deps = [ shim_target ]
    if (invoker.product) {
      tool = gen_snapshot_product
    } else {
      tool = gen_snapshot
    }
    args = [
      "--deterministic",
      "--snapshot_kind=vm-aot-assembly",
      "--assembly=" + rebase_path(snapshot_assembly),
    ]
    # No asserts in debug or release product.
    # No asserts in release with flutter_profile=true (non-product)
    # Yes asserts in non-product debug.
    if (!invoker.product && (flutter_runtime_mode == "debug" || is_debug)) {
      args += [ "--enable_asserts" ]
    }
    args += [ rebase_path(shim_kernel) ]
  }
  source_set(target_name) {
    deps = [ ":${target_name}_assembly" ]
    sources = [
      "$target_gen_dir/aot${product_suffix}_vm_snapshot.S",
      "snapshot.h",
    ]
  }
}
# Non-product (JIT-friendly/debug) AOT snapshot source_set.
create_aot_snapshot("dart_aot_snapshot_cc") {
  product = false
}
# Product-mode AOT snapshot source_set.
create_aot_snapshot("dart_aot_product_snapshot_cc") {
  product = true
}
| engine/shell/platform/fuchsia/dart_runner/embedder/BUILD.gn/0 | {
"file_path": "engine/shell/platform/fuchsia/dart_runner/embedder/BUILD.gn",
"repo_id": "engine",
"token_count": 876
} | 378 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//flutter/tools/fuchsia/dart/dart_component.gni")
import("//flutter/tools/fuchsia/fuchsia_archive.gni")
import("//flutter/tools/fuchsia/gn-sdk/src/gn_configs.gni")
# Umbrella group aggregating this directory's test packages.
group("tests") {
  testonly = true
  deps = [ ":dart-jit-runner-integration-test" ]
}
# Test executable for the Dart JIT runner integration test.
executable("dart-jit-runner-integration-test-bin") {
  testonly = true
  output_name = "dart-jit-runner-integration-test"
  sources = [ "dart-jit-runner-integration-test.cc" ]
  # This is needed for //flutter/third_party/googletest for linking zircon
  # symbols.
  libs = [ "${fuchsia_arch_root}/sysroot/lib/libzircon.so" ]
  deps = [
    "${fuchsia_sdk}/fidl/fuchsia.logger",
    "${fuchsia_sdk}/fidl/fuchsia.tracing.provider",
    "${fuchsia_sdk}/pkg/async",
    "${fuchsia_sdk}/pkg/async-loop-testing",
    "${fuchsia_sdk}/pkg/fidl_cpp",
    "${fuchsia_sdk}/pkg/sys_component_cpp_testing",
    "${fuchsia_sdk}/pkg/zx",
    "../dart_echo_server:jit_echo_package",
    "//flutter/fml",
    "//flutter/shell/platform/fuchsia/dart_runner/fidl:dart_test",
    "//flutter/third_party/googletest:gtest",
    "//flutter/third_party/googletest:gtest_main",
  ]
}
# Packages the test binary with its component manifest into a Fuchsia test
# archive.
fuchsia_test_archive("dart-jit-runner-integration-test") {
  deps = [ ":dart-jit-runner-integration-test-bin" ]
  binary = "$target_name"
  cml_file = rebase_path("meta/$target_name.cml")
}
| engine/shell/platform/fuchsia/dart_runner/tests/startup_integration_test/dart_jit_runner/BUILD.gn/0 | {
"file_path": "engine/shell/platform/fuchsia/dart_runner/tests/startup_integration_test/dart_jit_runner/BUILD.gn",
"repo_id": "engine",
"token_count": 630
} | 379 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/fuchsia/flutter/canvas_spy.h"
namespace flutter {
// Fans every draw out to both |target_canvas| and an internal DidDrawCanvas
// via an SkNWayCanvas sized to the target's base layer; |adapter_| exposes
// the fan-out canvas through the DlCanvas interface.
CanvasSpy::CanvasSpy(SkCanvas* target_canvas) {
  SkISize canvas_size = target_canvas->getBaseLayerSize();
  n_way_canvas_ =
      std::make_unique<SkNWayCanvas>(canvas_size.width(), canvas_size.height());
  did_draw_canvas_ = std::make_unique<DidDrawCanvas>(canvas_size.width(),
                                                     canvas_size.height());
  n_way_canvas_->addCanvas(target_canvas);
  n_way_canvas_->addCanvas(did_draw_canvas_.get());
  adapter_.set_canvas(n_way_canvas_.get());
}
// Returns the spying canvas through the DlCanvas adapter interface.
DlCanvas* CanvasSpy::GetSpyingCanvas() {
  return &adapter_;
}

// Returns the underlying SkNWayCanvas that fans draws out to the target
// canvas and the DidDrawCanvas.
SkCanvas* CanvasSpy::GetRawSpyingCanvas() {
  return n_way_canvas_.get();
}
// A sized no-draw canvas; SkCanvasVirtualEnforcer forces every draw virtual
// to be overridden below so no operation can slip through unobserved.
DidDrawCanvas::DidDrawCanvas(int width, int height)
    : SkCanvasVirtualEnforcer<SkNoDrawCanvas>(width, height) {}
DidDrawCanvas::~DidDrawCanvas() {}
// Latches |did_draw_| when a draw uses a paint that could produce visible
// output. A fully transparent paint (alpha == 0) never counts as a draw.
void DidDrawCanvas::MarkDrawIfNonTransparentPaint(const SkPaint& paint) {
  did_draw_ = did_draw_ || paint.getAlpha() != 0;
}
// True if any draw observed so far could have produced visible output.
bool CanvasSpy::DidDrawIntoCanvas() {
  return did_draw_canvas_->DidDrawIntoCanvas();
}
// Reports the latched draw flag accumulated by the overrides below.
bool DidDrawCanvas::DidDrawIntoCanvas() {
  return did_draw_;
}
// State-only operations (save/restore, matrix changes, clips) cannot produce
// visible output, so these overrides are intentionally empty and never set
// |did_draw_|.
void DidDrawCanvas::willSave() {}
SkCanvas::SaveLayerStrategy DidDrawCanvas::getSaveLayerStrategy(
    const SaveLayerRec& rec) {
  return kNoLayer_SaveLayerStrategy;
}
bool DidDrawCanvas::onDoSaveBehind(const SkRect* bounds) {
  return false;
}
void DidDrawCanvas::willRestore() {}
void DidDrawCanvas::didConcat44(const SkM44&) {}
void DidDrawCanvas::didScale(SkScalar, SkScalar) {}
void DidDrawCanvas::didTranslate(SkScalar, SkScalar) {}
void DidDrawCanvas::onClipRect(const SkRect& rect,
                               SkClipOp op,
                               ClipEdgeStyle edgeStyle) {}
void DidDrawCanvas::onClipRRect(const SkRRect& rrect,
                                SkClipOp op,
                                ClipEdgeStyle edgeStyle) {}
void DidDrawCanvas::onClipPath(const SkPath& path,
                               SkClipOp op,
                               ClipEdgeStyle edgeStyle) {}
void DidDrawCanvas::onClipRegion(const SkRegion& deviceRgn, SkClipOp op) {}
// Paint-carrying geometry draws: these count as a draw only when the paint is
// not fully transparent (see MarkDrawIfNonTransparentPaint).
void DidDrawCanvas::onDrawPaint(const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawBehind(const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawPoints(PointMode mode,
                                 size_t count,
                                 const SkPoint pts[],
                                 const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawRect(const SkRect& rect, const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawRegion(const SkRegion& region, const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawOval(const SkRect& rect, const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawArc(const SkRect& rect,
                              SkScalar startAngle,
                              SkScalar sweepAngle,
                              bool useCenter,
                              const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawRRect(const SkRRect& rrect, const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawDRRect(const SkRRect& outer,
                                 const SkRRect& inner,
                                 const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawPath(const SkPath& path, const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
// Legacy (pre-SkSamplingOptions) image entry points, compiled only when Skia
// still supports the old onDrawImageRect API. Image draws are marked
// unconditionally: the optional paint is not consulted here.
#ifdef SK_SUPPORT_LEGACY_ONDRAWIMAGERECT
void DidDrawCanvas::onDrawImage(const SkImage* image,
                                SkScalar left,
                                SkScalar top,
                                const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawImageRect(const SkImage* image,
                                    const SkRect* src,
                                    const SkRect& dst,
                                    const SkPaint* paint,
                                    SrcRectConstraint constraint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawImageLattice(const SkImage* image,
                                       const Lattice& lattice,
                                       const SkRect& dst,
                                       const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawAtlas(const SkImage* image,
                                const SkRSXform xform[],
                                const SkRect tex[],
                                const SkColor colors[],
                                int count,
                                SkBlendMode bmode,
                                const SkRect* cull,
                                const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawEdgeAAImageSet(const ImageSetEntry set[],
                                         int count,
                                         const SkPoint dstClips[],
                                         const SkMatrix preViewMatrices[],
                                         const SkPaint* paint,
                                         SrcRectConstraint constraint) {
  did_draw_ = true;
}
#endif
// Current-API draw entry points. Image, picture, drawable, shadow,
// annotation, and edge-AA draws are marked unconditionally; text, vertices,
// and patch draws still respect the paint's alpha via
// MarkDrawIfNonTransparentPaint.
void DidDrawCanvas::onDrawImage2(const SkImage* image,
                                 SkScalar left,
                                 SkScalar top,
                                 const SkSamplingOptions&,
                                 const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawImageRect2(const SkImage* image,
                                     const SkRect& src,
                                     const SkRect& dst,
                                     const SkSamplingOptions&,
                                     const SkPaint* paint,
                                     SrcRectConstraint constraint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawImageLattice2(const SkImage* image,
                                        const Lattice& lattice,
                                        const SkRect& dst,
                                        SkFilterMode,
                                        const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawTextBlob(const SkTextBlob* blob,
                                   SkScalar x,
                                   SkScalar y,
                                   const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawPicture(const SkPicture* picture,
                                  const SkMatrix* matrix,
                                  const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawDrawable(SkDrawable* drawable,
                                   const SkMatrix* matrix) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawVerticesObject(const SkVertices* vertices,
                                         SkBlendMode bmode,
                                         const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawPatch(const SkPoint cubics[12],
                                const SkColor colors[4],
                                const SkPoint texCoords[4],
                                SkBlendMode bmode,
                                const SkPaint& paint) {
  MarkDrawIfNonTransparentPaint(paint);
}
void DidDrawCanvas::onDrawAtlas2(const SkImage* image,
                                 const SkRSXform xform[],
                                 const SkRect tex[],
                                 const SkColor colors[],
                                 int count,
                                 SkBlendMode bmode,
                                 const SkSamplingOptions&,
                                 const SkRect* cull,
                                 const SkPaint* paint) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawShadowRec(const SkPath& path,
                                    const SkDrawShadowRec& rec) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawAnnotation(const SkRect& rect,
                                     const char key[],
                                     SkData* data) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawEdgeAAQuad(const SkRect& rect,
                                     const SkPoint clip[4],
                                     SkCanvas::QuadAAFlags aa,
                                     const SkColor4f& color,
                                     SkBlendMode mode) {
  did_draw_ = true;
}
void DidDrawCanvas::onDrawEdgeAAImageSet2(const ImageSetEntry set[],
                                          int count,
                                          const SkPoint dstClips[],
                                          const SkMatrix preViewMatrices[],
                                          const SkSamplingOptions&,
                                          const SkPaint* paint,
                                          SrcRectConstraint constraint) {
  did_draw_ = true;
}
} // namespace flutter
| engine/shell/platform/fuchsia/flutter/canvas_spy.cc/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/canvas_spy.cc",
"repo_id": "engine",
"token_count": 5291
} | 380 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_FLUTTER_RUNNER_PRODUCT_CONFIGURATION_H_
#define FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_FLUTTER_RUNNER_PRODUCT_CONFIGURATION_H_
#include <string>
namespace flutter_runner {
// Parsed product-level configuration flags for the Flutter runner.
//
// The default constructor yields all defaults; the string constructor
// (defined in the .cc file) parses the flags from a JSON document.
class FlutterRunnerProductConfiguration {
 public:
  FlutterRunnerProductConfiguration() {}
  explicit FlutterRunnerProductConfiguration(std::string json_string);

  // Whether the runner should intercept all input. Accessors are
  // const-qualified so the configuration can be read through const
  // references.
  bool get_intercept_all_input() const { return intercept_all_input_; }
  // Whether to render with the software backend instead of Vulkan.
  bool software_rendering() const { return software_rendering_; }
  // Whether shader warmup is enabled.
  bool enable_shader_warmup() const { return enable_shader_warmup_; }
  // Whether the Dart-side shader warmup hooks are enabled.
  bool enable_shader_warmup_dart_hooks() const {
    return enable_shader_warmup_dart_hooks_;
  }

 private:
  bool intercept_all_input_ = false;
  bool software_rendering_ = false;
  bool enable_shader_warmup_ = false;
  bool enable_shader_warmup_dart_hooks_ = true;
};
} // namespace flutter_runner
#endif // FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_FLUTTER_RUNNER_PRODUCT_CONFIGURATION_H_
| engine/shell/platform/fuchsia/flutter/flutter_runner_product_configuration.h/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/flutter_runner_product_configuration.h",
"repo_id": "engine",
"token_count": 397
} | 381 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_KEYBOARD_H_
#define FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_KEYBOARD_H_
#include <fuchsia/ui/input3/cpp/fidl.h>
namespace flutter_runner {
// Keyboard handles the keyboard signals from fuchsia.ui.input3. Specifically,
// input3 has no notion of a code point, and does not track stateful versions
// of the modifier keys.
class Keyboard final {
 public:
  explicit Keyboard();
  // Consumes the given keyboard event. Keyboard will adjust the modifier
  // state based on the info given in the event. Returns true if the event has
  // been integrated into the internal state successfully, or false otherwise.
  bool ConsumeEvent(fuchsia::ui::input3::KeyEvent event);
  // Gets the currently active modifier keys.
  uint32_t Modifiers();
  // Gets the last encountered code point. The reported code point depends on
  // the state of the modifier keys.
  uint32_t LastCodePoint();
  // Gets the last encountered HID usage. This is a 32-bit number, with the
  // upper 16 bits equal to `LastHidUsagePage()`, and the lower 16 bits equal
  // to `LastHIDUsageID()`.
  //
  // The key corresponding to A will have the usage 0x7004. This function will
  // return 0x7004 in that case.
  uint32_t LastHIDUsage();
  // Gets the last encountered HID usage page.
  //
  // The key corresponding to A will have the usage 0x7004. This function will
  // return 0x7 in that case.
  uint16_t LastHIDUsagePage();
  // Gets the last encountered HID usage ID.
  //
  // The key corresponding to A will have the usage 0x7004. This function will
  // return 0x4 in that case.
  uint16_t LastHIDUsageID();

 private:
  // Return true if any level shift is active.
  bool IsShift();
  // Returns true if the last key event was about a key that may have a code
  // point associated.
  bool IsKeys();
  // Returns the value of the last key as a uint32_t.
  // If there isn't such a value (as in the case of on-screen keyboards), this
  // will return a 0;
  uint32_t GetLastKey();
  // Set to false until any event is received.
  // (Single-bit bitfields keep the modifier state compact.)
  bool any_events_received_ : 1;
  // The flags below show the state of the keyboard modifiers after the last
  // event has been processed. Stateful keys remain in the same state after
  // a release and require an additional press to toggle.
  bool stateful_caps_lock_ : 1;
  bool left_shift_ : 1;
  bool right_shift_ : 1;
  bool left_alt_ : 1;
  bool right_alt_ : 1;
  bool left_ctrl_ : 1;
  bool right_ctrl_ : 1;
  // The last received key event. If any_events_received_ is not set, this is
  // not valid.
  fuchsia::ui::input3::KeyEvent last_event_;
};
} // namespace flutter_runner
#endif // FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_KEYBOARD_H_
| engine/shell/platform/fuchsia/flutter/keyboard.h/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/keyboard.h",
"repo_id": "engine",
"token_count": 882
} | 382 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <fuchsia/ui/pointerinjector/cpp/fidl.h>
#include <fuchsia/ui/views/cpp/fidl.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <lib/async-loop/cpp/loop.h>
#include <lib/async-loop/default.h>
#include <lib/async/cpp/task.h>
#include <lib/zx/eventpair.h>
#include <lib/fidl/cpp/binding_set.h>
#include "pointer_injector_delegate.h"
#include "tests/fakes/mock_injector_registry.h"
#include "tests/fakes/platform_message.h"
namespace flutter_runner::testing {
using fup_DeviceType = fuchsia::ui::pointerinjector::DeviceType;
using fup_DispatchPolicy = fuchsia::ui::pointerinjector::DispatchPolicy;
using fup_EventPhase = fuchsia::ui::pointerinjector::EventPhase;
using fup_RegistryHandle = fuchsia::ui::pointerinjector::RegistryHandle;
using fuv_ViewRef = fuchsia::ui::views::ViewRef;
namespace {
// 3x3 column-major identity matrix; presumably used as a viewport/context
// transform when building injector configs — confirm against the rest of the
// test file.
// clang-format off
static constexpr std::array<float, 9> kIdentityMatrix = {
    1, 0, 0,  // column one
    0, 1, 0,  // column two
    0, 0, 1,  // column three
};
// clang-format on
// Parses |json| and returns its top-level object, or a default-constructed
// (null) rapidjson::Value when the text is malformed or not a JSON object.
rapidjson::Value ParsePlatformMessage(std::string json) {
  rapidjson::Document parsed;
  parsed.Parse(json);
  const bool is_valid_object = !parsed.HasParseError() && parsed.IsObject();
  if (!is_valid_object) {
    FML_LOG(ERROR) << "Could not parse document";
    return rapidjson::Value();
  }
  return parsed.GetObject();
}
// Returns the kernel object id (koid) of |object|, or ZX_KOID_INVALID when
// the handle's basic info cannot be queried.
zx_koid_t ExtractKoid(const zx::object_base& object) {
  zx_info_handle_basic_t info{};
  if (object.get_info(ZX_INFO_HANDLE_BASIC, &info, sizeof(info), nullptr,
                      nullptr) != ZX_OK) {
    return ZX_KOID_INVALID;  // no info
  }
  return info.koid;
}
// Convenience overload: extracts the koid of a ViewRef's underlying eventpair.
zx_koid_t ExtractKoid(const fuv_ViewRef& view_ref) {
  return ExtractKoid(view_ref.reference);
}
// Fluent builder for the "View.Pointerinjector.inject" JSON platform message
// consumed by PointerInjectorDelegate in these tests.
class PlatformMessageBuilder {
 public:
  PlatformMessageBuilder& SetViewId(uint64_t view_id) {
    view_id_ = view_id;
    return *this;
  }
  PlatformMessageBuilder& SetPointerX(float x) {
    pointer_x_ = x;
    return *this;
  }
  PlatformMessageBuilder& SetPointerY(float y) {
    pointer_y_ = y;
    return *this;
  }
  PlatformMessageBuilder& SetPhase(int phase) {
    phase_ = phase;
    return *this;
  }
  PlatformMessageBuilder& SetPointerId(int pointer_id) {
    pointer_id_ = pointer_id;
    return *this;
  }
  PlatformMessageBuilder& SetTraceFlowId(int trace_flow_id) {
    trace_flow_id_ = trace_flow_id;
    return *this;
  }
  PlatformMessageBuilder& SetLogicalWidth(float width) {
    width_ = width;
    return *this;
  }
  PlatformMessageBuilder& SetLogicalHeight(float height) {
    height_ = height;
    return *this;
  }
  PlatformMessageBuilder& SetTimestamp(int timestamp) {
    timestamp_ = timestamp;
    return *this;
  }
  // Serializes the configured fields into the platform-message JSON object.
  rapidjson::Value Build() {
    std::ostringstream message;
    message << "{" << " \"method\":\""
            << PointerInjectorDelegate::kPointerInjectorMethodPrefix << "\","
            << " \"args\": {" << " \"viewId\":" << view_id_ << ","
            << " \"x\":" << pointer_x_ << ","
            << " \"y\":" << pointer_y_ << ","
            << " \"phase\":" << phase_ << ","
            << " \"pointerId\":" << pointer_id_ << ","
            << " \"traceFlowId\":" << trace_flow_id_ << ","
            << " \"viewRef\":" << view_ref_.reference.get() << ","
            << " \"logicalWidth\":" << width_ << ","
            << " \"logicalHeight\":" << height_ << ","
            << " \"timestamp\":" << timestamp_ << " }" << "}";
    return ParsePlatformMessage(message.str());
  }

 private:
  uint64_t view_id_ = 0;
  float pointer_x_ = 0.f, pointer_y_ = 0.f;
  int phase_ = 1, pointer_id_ = 0, trace_flow_id_ = 0;
  // NOTE(review): no setter ever populates |view_ref_|, so Build() serializes
  // the handle value of a default-constructed ViewRef — confirm this is
  // intended.
  fuv_ViewRef view_ref_;
  float width_ = 0.f, height_ = 0.f;
  int timestamp_ = 0;
};
} // namespace
// Fixture standing up a MockInjectorRegistry and a PointerInjectorDelegate
// wired to the host view's ViewRef, plus hand-rolled run-loop helpers.
class PointerInjectorDelegateTest : public ::testing::Test,
                                    public ::testing::WithParamInterface<bool> {
 protected:
  PointerInjectorDelegateTest()
      : loop_(&kAsyncLoopConfigAttachToCurrentThread) {}
  // TODO(fxbug.dev/104285): Replace the RunLoop methods with the one provided
  // by the sdk.
  void RunLoopUntilIdle() { loop_.RunUntilIdle(); }
  // Runs |loop| until |timeout| elapses or loop->Quit() is called elsewhere;
  // returns true if the timeout fired.
  bool RunGivenLoopWithTimeout(async::Loop* loop, zx::duration timeout) {
    // This cannot be a local variable because the delayed task below can
    // execute after this function returns.
    auto canceled = std::make_shared<bool>(false);
    bool timed_out = false;
    async::PostDelayedTask(
        loop->dispatcher(),
        [loop, canceled, &timed_out] {
          if (*canceled) {
            return;
          }
          timed_out = true;
          loop->Quit();
        },
        timeout);
    loop->Run();
    loop->ResetQuit();
    if (!timed_out) {
      *canceled = true;
    }
    return timed_out;
  }
  // Pumps the loop in |step|-sized increments until |condition| holds or
  // |timeout| elapses; returns the final value of |condition|.
  bool RunLoopWithTimeoutOrUntil(fit::function<bool()> condition,
                                 zx::duration timeout,
                                 zx::duration step) {
    const zx::time timeout_deadline = zx::deadline_after(timeout);
    while (zx::clock::get_monotonic() < timeout_deadline &&
           loop_.GetState() == ASYNC_LOOP_RUNNABLE) {
      if (condition()) {
        loop_.ResetQuit();
        return true;
      }
      if (step == zx::duration::infinite()) {
        // Performs a single unit of work, possibly blocking until there is work
        // to do or the timeout deadline arrives.
        loop_.Run(timeout_deadline, true);
      } else {
        // Performs work until the step deadline arrives.
        RunGivenLoopWithTimeout(&loop_, step);
      }
    }
    loop_.ResetQuit();
    return condition();
  }
  // Blocks (while pumping the loop) until |condition| becomes true.
  void RunLoopUntil(fit::function<bool()> condition,
                    zx::duration step = zx::msec(10)) {
    RunLoopWithTimeoutOrUntil(std::move(condition), zx::duration::infinite(),
                              step);
  }
  // Creates the host ViewRef, the mock injector registry, and the delegate
  // under test.
  void SetUp() override {
    fuchsia::ui::views::ViewRefControl view_ref_control;
    fuchsia::ui::views::ViewRef view_ref;
    auto status = zx::eventpair::create(
        /*options*/ 0u, &view_ref_control.reference, &view_ref.reference);
    ASSERT_EQ(status, ZX_OK);
    view_ref_control.reference.replace(
        ZX_DEFAULT_EVENTPAIR_RIGHTS & (~ZX_RIGHT_DUPLICATE),
        &view_ref_control.reference);
    view_ref.reference.replace(ZX_RIGHTS_BASIC, &view_ref.reference);
    host_view_ref_ = std::move(view_ref);
    fup_RegistryHandle registry;
    registry_ = std::make_unique<MockInjectorRegistry>(registry.NewRequest());
    fuv_ViewRef host_view_ref_clone;
    fidl::Clone(host_view_ref_, &host_view_ref_clone);
    pointer_injector_delegate_ = std::make_unique<PointerInjectorDelegate>(
        std::move(registry), std::move(host_view_ref_clone));
  }
  // Registers |view_id| with the delegate, minting a fresh ViewRef when one
  // is not supplied.
  void CreateView(uint64_t view_id,
                  std::optional<fuv_ViewRef> view_ref = std::nullopt) {
    fuv_ViewRef ref;
    if (view_ref.has_value()) {
      ref = std::move(*view_ref);
    } else {
      fuchsia::ui::views::ViewRefControl view_ref_control;
      fuchsia::ui::views::ViewRef view_ref;
      auto status = zx::eventpair::create(
          /*options*/ 0u, &view_ref_control.reference, &view_ref.reference);
      ASSERT_EQ(status, ZX_OK);
      view_ref_control.reference.replace(
          ZX_DEFAULT_EVENTPAIR_RIGHTS & (~ZX_RIGHT_DUPLICATE),
          &view_ref_control.reference);
      view_ref.reference.replace(ZX_RIGHTS_BASIC, &view_ref.reference);
      ref = std::move(view_ref);
    }
    pointer_injector_delegate_->OnCreateView(view_id, std::move(ref));
  }
  std::unique_ptr<PointerInjectorDelegate> pointer_injector_delegate_;
  std::unique_ptr<MockInjectorRegistry> registry_;
  fuv_ViewRef host_view_ref_;

 private:
  async::Loop loop_;
};
// Malformed or unrelated platform messages must be rejected by the delegate.
TEST_P(PointerInjectorDelegateTest, IncorrectPlatformMessage_ShouldFail) {
  const uint64_t view_id = 1;
  // Create a view.
  CreateView(view_id);
  // A platform message in incorrect JSON format should fail.
  {
    auto response = FakePlatformMessageResponse::Create();
    EXPECT_FALSE(pointer_injector_delegate_->HandlePlatformMessage(
        ParsePlatformMessage("{Incorrect Json}"), response));
  }
  // |PointerInjectorDelegate| only handles "View.Pointerinjector.inject"
  // platform messages.
  {
    auto response = FakePlatformMessageResponse::Create();
    EXPECT_FALSE(pointer_injector_delegate_->HandlePlatformMessage(
        ParsePlatformMessage("{\"method\":\"View.focus.getCurrent\"}"),
        response));
  }
  // A platform message with no args should fail.
  {
    auto response = FakePlatformMessageResponse::Create();
    EXPECT_FALSE(pointer_injector_delegate_->HandlePlatformMessage(
        ParsePlatformMessage("{\"method\":\"View.Pointerinjector.inject\"}"),
        response));
  }
}
// Events injected for registered views must reach the injector registry, and
// each view must trigger exactly one Register call.
TEST_P(PointerInjectorDelegateTest, ViewsReceiveInjectedEvents) {
  const uint64_t num_events = 150;

  // Inject |num_events| platform messages for view 1.
  {
    const uint64_t view_id = 1;
    CreateView(view_id);
    for (size_t i = 0; i < num_events; i++) {
      auto response = FakePlatformMessageResponse::Create();
      EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
          PlatformMessageBuilder().SetViewId(view_id).Build(), response));
      response->ExpectCompleted("[0]");
    }
  }

  // Inject |num_events| platform messages for view 2.
  {
    const uint64_t view_id = 2;
    CreateView(view_id);
    for (size_t i = 0; i < num_events; i++) {
      auto response = FakePlatformMessageResponse::Create();
      EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
          PlatformMessageBuilder().SetViewId(view_id).Build(), response));
      response->ExpectCompleted("[0]");
    }
  }

  // The mock Pointerinjector registry server receives |num_events| pointer
  // events from |f.u.p.Device.Inject| calls for each view.
  RunLoopUntil(
      [this] { return registry_->num_events_received() == 2 * num_events; });

  // The mock Pointerinjector registry server receives a
  // |f.u.p.Registry.Register| call for each view.
  EXPECT_EQ(registry_->num_register_calls(), 2u);
}
// Messages targeting views that were never created are accepted silently but
// must not produce any injected events.
TEST_P(PointerInjectorDelegateTest,
       ViewsDontReceivePointerEventsBeforeCreation) {
  const uint64_t num_events = 150;

  const uint64_t view_id_1 = 1;
  // Inject |num_events| platform messages for |view_id_1| without creating
  // the view.
  for (size_t i = 0; i < num_events; i++) {
    auto response = FakePlatformMessageResponse::Create();
    // The platform message is *silently* accepted for non-existent views, in
    // order to cleanly handle the lifecycle case where the child view is
    // forcibly killed. By doing so, products avoid "MissingPluginException"
    // log spam.
    EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
        PlatformMessageBuilder().SetViewId(view_id_1).Build(), response));
  }

  const uint64_t view_id_2 = 2;
  // Inject |num_events| platform messages for |view_id_2| without creating
  // the view.
  for (size_t i = 0; i < num_events; i++) {
    auto response = FakePlatformMessageResponse::Create();
    // The platform message is *silently* accepted for non-existent views, in
    // order to cleanly handle the lifecycle case where the child view is
    // forcibly killed. By doing so, products avoid "MissingPluginException"
    // log spam.
    EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
        PlatformMessageBuilder().SetViewId(view_id_2).Build(), response));
  }

  RunLoopUntilIdle();

  // The views do not receive any pointer events till they get created.
  EXPECT_EQ(registry_->num_events_received(), 0u);
}
// PointerInjectorDelegate should generate a correct |f.u.p.Config| from a
// platform message.
TEST_P(PointerInjectorDelegateTest, ValidRegistrationConfigTest) {
  const uint64_t view_id = 1;
  const float x = 2.f, y = 2.f, width = 5.f, height = 5.f;
  const int phase = 2, pointer_id = 5, trace_flow_id = 5, timestamp = 10;
  auto response = FakePlatformMessageResponse::Create();
  fuchsia::ui::views::ViewRefControl view_ref_control;
  fuchsia::ui::views::ViewRef view_ref;
  auto status = zx::eventpair::create(
      /*options*/ 0u, &view_ref_control.reference, &view_ref.reference);
  // Use the gtest assertion (instead of ZX_ASSERT) so a failure is reported
  // through the test framework, consistent with the other tests in this file.
  ASSERT_EQ(status, ZX_OK);
  view_ref_control.reference.replace(
      ZX_DEFAULT_EVENTPAIR_RIGHTS & (~ZX_RIGHT_DUPLICATE),
      &view_ref_control.reference);
  view_ref.reference.replace(ZX_RIGHTS_BASIC, &view_ref.reference);
  // Create the view, keeping |view_ref| so its koid can be checked against
  // the registered config's target below.
  fuv_ViewRef view_ref_clone;
  fidl::Clone(view_ref, &view_ref_clone);
  CreateView(view_id, std::move(view_ref_clone));
  // Inject a platform message carrying a fully-populated pointer event.
  EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
      PlatformMessageBuilder()
          .SetViewId(view_id)
          .SetPointerX(x)
          .SetPointerY(y)
          .SetPhase(phase)
          .SetPointerId(pointer_id)
          .SetTraceFlowId(trace_flow_id)
          .SetLogicalWidth(width)
          .SetLogicalHeight(height)
          .SetTimestamp(timestamp)
          .Build(),
      response));
  response->ExpectCompleted("[0]");
  // The mock Pointerinjector registry server receives a pointer event from
  // |f.u.p.Device.Inject| call for the view.
  RunLoopUntil([this] { return registry_->num_events_received() == 1; });
  // The mock Pointerinjector registry server receives a
  // |f.u.p.Registry.Register| call for the view.
  ASSERT_EQ(registry_->num_register_calls(), 1u);
  const auto& config = registry_->config();
  ASSERT_TRUE(config.has_device_id());
  EXPECT_EQ(config.device_id(), 1u);
  ASSERT_TRUE(config.has_device_type());
  EXPECT_EQ(config.device_type(), fup_DeviceType::TOUCH);
  ASSERT_TRUE(config.has_dispatch_policy());
  EXPECT_EQ(config.dispatch_policy(), fup_DispatchPolicy::EXCLUSIVE_TARGET);
  ASSERT_TRUE(config.has_context());
  ASSERT_TRUE(config.context().is_view());
  EXPECT_EQ(ExtractKoid(config.context().view()), ExtractKoid(host_view_ref_));
  ASSERT_TRUE(config.has_target());
  ASSERT_TRUE(config.target().is_view());
  EXPECT_EQ(ExtractKoid(config.target().view()), ExtractKoid(view_ref));
  ASSERT_TRUE(config.has_viewport());
  ASSERT_TRUE(config.viewport().has_viewport_to_context_transform());
  EXPECT_EQ(config.viewport().viewport_to_context_transform(), kIdentityMatrix);
  // Viewport extents span from the origin to the logical width/height sent in
  // the platform message.
  std::array<std::array<float, 2>, 2> extents{{{0, 0}, {width, height}}};
  ASSERT_TRUE(config.viewport().has_extents());
  EXPECT_EQ(config.viewport().extents(), extents);
}
// PointerInjectorDelegate generates a correct f.u.p.Event from the platform
// message.
TEST_P(PointerInjectorDelegateTest, ValidPointerEventTest) {
  const uint64_t view_id = 1;
  const float x = 2.f, y = 2.f, width = 5.f, height = 5.f;
  const int phase = 2, pointer_id = 5, trace_flow_id = 5, timestamp = 10;
  auto response = FakePlatformMessageResponse::Create();
  fuchsia::ui::views::ViewRefControl view_ref_control;
  fuchsia::ui::views::ViewRef view_ref;
  auto status = zx::eventpair::create(
      /*options*/ 0u, &view_ref_control.reference, &view_ref.reference);
  // Use the gtest assertion (instead of ZX_ASSERT) so a failure is reported
  // through the test framework, consistent with the other tests in this file.
  ASSERT_EQ(status, ZX_OK);
  view_ref_control.reference.replace(
      ZX_DEFAULT_EVENTPAIR_RIGHTS & (~ZX_RIGHT_DUPLICATE),
      &view_ref_control.reference);
  view_ref.reference.replace(ZX_RIGHTS_BASIC, &view_ref.reference);
  // Create the view.
  fuv_ViewRef view_ref_clone;
  fidl::Clone(view_ref, &view_ref_clone);
  CreateView(view_id, std::move(view_ref_clone));
  // Inject a platform message carrying a fully-populated pointer event.
  EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
      PlatformMessageBuilder()
          .SetViewId(view_id)
          .SetPointerX(x)
          .SetPointerY(y)
          .SetPhase(phase)
          .SetPointerId(pointer_id)
          .SetTraceFlowId(trace_flow_id)
          .SetLogicalWidth(width)
          .SetLogicalHeight(height)
          .SetTimestamp(timestamp)
          .Build(),
      response));
  response->ExpectCompleted("[0]");
  // The mock Pointerinjector registry server receives a pointer event from
  // |f.u.p.Device.Inject| call for the view.
  RunLoopUntil([this] { return registry_->num_events_received() == 1; });
  // The mock Pointerinjector registry server receives a
  // |f.u.p.Registry.Register| call for the view.
  ASSERT_EQ(registry_->num_register_calls(), 1u);
  // Verify that the received event's fields match what the platform message
  // carried.
  const auto& events = registry_->events();
  ASSERT_EQ(events.size(), 1u);
  const auto& event = events[0];
  ASSERT_TRUE(event.has_timestamp());
  EXPECT_EQ(event.timestamp(), timestamp);
  ASSERT_TRUE(event.has_trace_flow_id());
  EXPECT_EQ(event.trace_flow_id(), static_cast<uint64_t>(trace_flow_id));
  ASSERT_TRUE(event.has_data());
  ASSERT_TRUE(event.data().is_pointer_sample());
  const auto& pointer_sample = event.data().pointer_sample();
  ASSERT_TRUE(pointer_sample.has_pointer_id());
  ASSERT_TRUE(pointer_sample.has_phase());
  ASSERT_TRUE(pointer_sample.has_position_in_viewport());
  EXPECT_EQ(pointer_sample.pointer_id(), static_cast<uint32_t>(pointer_id));
  EXPECT_EQ(pointer_sample.phase(), static_cast<fup_EventPhase>(phase));
  EXPECT_THAT(pointer_sample.position_in_viewport(),
              ::testing::ElementsAre(x, y));
}
// Ensures that a view stops receiving pointer events once it is destroyed,
// even if events were queued for it before destruction.
TEST_P(PointerInjectorDelegateTest, DestroyedViewsDontGetPointerEvents) {
  const uint64_t view_id = 1, num_events = 150;

  // Create the view. NOTE(review): the ViewRefControl/ViewRef boilerplate
  // present in sibling tests was removed here because those locals were never
  // passed to CreateView() or referenced afterwards — they were dead code.
  CreateView(view_id);

  // Inject |num_events| platform messages.
  for (size_t i = 0; i < num_events; i++) {
    auto response = FakePlatformMessageResponse::Create();
    EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
        PlatformMessageBuilder().SetViewId(view_id).Build(), response));
    response->ExpectCompleted("[0]");
  }

  // Destroy the view.
  pointer_injector_delegate_->OnDestroyView(view_id);

  // The view does not receive |num_events| pointer events as it gets destroyed
  // before all the pointer events could be dispatched.
  const zx::duration timeout = zx::sec(1), step = zx::msec(10);
  EXPECT_FALSE(RunLoopWithTimeoutOrUntil(
      [this] { return registry_->num_events_received() == num_events; },
      timeout, step));
  EXPECT_LT(registry_->num_events_received(), num_events);
}
// Ensures that pointer events are dispatched to a view in the same order in
// which they were injected.
TEST_P(PointerInjectorDelegateTest, ViewsGetPointerEventsInFIFO) {
  const uint64_t view_id = 1, num_events = 150;

  // Create the view. NOTE(review): the ViewRefControl/ViewRef boilerplate
  // present in sibling tests was removed here because those locals were never
  // passed to CreateView() or referenced afterwards — they were dead code.
  CreateView(view_id);

  // Inject |num_events| platform messages, each tagged with an increasing
  // |pointer_id| so the dispatch order can be verified below.
  for (size_t i = 0; i < num_events; i++) {
    auto response = FakePlatformMessageResponse::Create();
    EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
        PlatformMessageBuilder()
            .SetViewId(view_id)
            .SetPointerId(static_cast<uint32_t>(i))
            .Build(),
        response));
    response->ExpectCompleted("[0]");
  }

  // The mock Pointerinjector registry server receives |num_events| pointer
  // events from |f.u.p.Device.Inject| call for the view.
  RunLoopUntil(
      [this] { return registry_->num_events_received() == num_events; });

  // The mock Pointerinjector registry server receives a
  // |f.u.p.Registry.Register| call for the view.
  ASSERT_EQ(registry_->num_register_calls(), 1u);

  auto& events = registry_->events();

  // Guard the pairwise loop below: with an empty |events| the original
  // |events.size() - 1| bound would underflow to SIZE_MAX.
  ASSERT_EQ(events.size(), num_events);

  // The view should receive the pointer events in a FIFO order. As we injected
  // platform messages with an increasing |pointer_id|, the received pointer
  // events should also have the |pointer_id| in an increasing order.
  for (size_t i = 0; i + 1 < events.size(); i++) {
    ASSERT_TRUE(events[i].has_data());
    ASSERT_TRUE(events[i + 1].has_data());
    ASSERT_TRUE(events[i].data().is_pointer_sample());
    ASSERT_TRUE(events[i + 1].data().is_pointer_sample());
    const auto& pointer_sample_1 = events[i].data().pointer_sample();
    const auto& pointer_sample_2 = events[i + 1].data().pointer_sample();
    ASSERT_TRUE(pointer_sample_1.has_pointer_id());
    ASSERT_TRUE(pointer_sample_2.has_pointer_id());
    EXPECT_LT(pointer_sample_1.pointer_id(), pointer_sample_2.pointer_id());
  }
}
// Ensures that the injector device re-registers with the registry after its
// channel is closed, and can still deliver events afterwards.
TEST_P(PointerInjectorDelegateTest, DeviceRetriesRegisterWhenClosed) {
  const uint64_t view_id = 1;
  const int pointer_id = 1;
  fuchsia::ui::views::ViewRefControl view_ref_control;
  fuchsia::ui::views::ViewRef view_ref;
  auto status = zx::eventpair::create(
      /*options*/ 0u, &view_ref_control.reference, &view_ref.reference);
  // Use the gtest assertion (instead of ZX_ASSERT) so a failure is reported
  // through the test framework, consistent with the other tests in this file.
  ASSERT_EQ(status, ZX_OK);
  view_ref_control.reference.replace(
      ZX_DEFAULT_EVENTPAIR_RIGHTS & (~ZX_RIGHT_DUPLICATE),
      &view_ref_control.reference);
  view_ref.reference.replace(ZX_RIGHTS_BASIC, &view_ref.reference);
  auto response = FakePlatformMessageResponse::Create();
  auto response_2 = FakePlatformMessageResponse::Create();
  // Create the view.
  fuv_ViewRef view_ref_clone;
  fidl::Clone(view_ref, &view_ref_clone);
  CreateView(view_id, std::move(view_ref_clone));
  EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
      PlatformMessageBuilder()
          .SetViewId(view_id)
          .SetPointerId(pointer_id)
          .Build(),
      response));
  response->ExpectCompleted("[0]");
  // The mock Pointerinjector registry server receives a pointer event from
  // |f.u.p.Device.Inject| call for the view.
  RunLoopUntil([this] { return registry_->num_events_received() == 1; });
  // The mock Pointerinjector registry server receives a
  // |f.u.p.Registry.Register| call for the view.
  ASSERT_EQ(registry_->num_register_calls(), 1u);
  // Close the device channel to force the delegate to re-register.
  registry_->ClearBindings();
  RunLoopUntilIdle();
  // A second injection should still succeed after the channel closure.
  EXPECT_TRUE(pointer_injector_delegate_->HandlePlatformMessage(
      PlatformMessageBuilder()
          .SetViewId(view_id)
          .SetPointerId(pointer_id)
          .Build(),
      response_2));
  response_2->ExpectCompleted("[0]");
  // The mock Pointerinjector registry server receives a pointer event from
  // |f.u.p.Device.Inject| call for the view.
  RunLoopUntil([this] { return registry_->num_events_received() == 2; });
  // The device tries to register again as the channel got closed.
  ASSERT_EQ(registry_->num_register_calls(), 2u);
}
// Instantiate every PointerInjectorDelegateTest case with both values of the
// bool test parameter (::testing::Bool() yields {false, true}).
// NOTE(review): what the bool parameter controls is decided where the fixture
// reads GetParam(), which is not visible in this chunk.
INSTANTIATE_TEST_SUITE_P(PointerInjectorDelegateParameterizedTest,
                         PointerInjectorDelegateTest,
                         ::testing::Bool());
} // namespace flutter_runner::testing
| engine/shell/platform/fuchsia/flutter/pointer_injector_delegate_unittest.cc/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/pointer_injector_delegate_unittest.cc",
"repo_id": "engine",
"token_count": 9804
} | 383 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_SURFACE_PRODUCER_H_
#define FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_SURFACE_PRODUCER_H_
#include <fuchsia/ui/composition/cpp/fidl.h>
#include <lib/zx/event.h>
#include <functional>
#include <memory>
#include <vector>
#include "third_party/skia/include/core/SkSize.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
namespace flutter_runner {
using ReleaseImageCallback = std::function<void()>;
// This represents an abstract notion of a "rendering surface", which is a
// destination for pixels drawn by some rendering engine. In this case, the
// rendering engine is Skia combined with one of its rendering backends.
//
// In addition to allowing Skia-based drawing via `GetSkiaSurface`, this
// rendering surface can be shared with Scenic via `SetImageId`,
// `GetBufferCollectionImportToken`, `GetAcquireFence`, and `GetReleaseFence`.
class SurfaceProducerSurface {
 public:
  virtual ~SurfaceProducerSurface() = default;

  // Whether this surface is usable for rendering.
  virtual bool IsValid() const = 0;

  // The pixel dimensions of this surface.
  virtual SkISize GetSize() const = 0;

  // Associates this surface with an image id; retrieved via GetImageId().
  virtual void SetImageId(uint32_t image_id) = 0;

  // The image id previously set via SetImageId().
  // NOTE(review): behavior before SetImageId() is called is
  // implementation-defined; not visible from this interface.
  virtual uint32_t GetImageId() = 0;

  // The Skia surface used to draw into this rendering surface.
  virtual sk_sp<SkSurface> GetSkiaSurface() const = 0;

  // Token used to share this surface's backing buffer collection with Scenic.
  virtual fuchsia::ui::composition::BufferCollectionImportToken
  GetBufferCollectionImportToken() = 0;

  // Fence signaled when the surface's contents are ready to be consumed.
  virtual zx::event GetAcquireFence() = 0;

  // Fence signaled when the consumer has released the surface.
  virtual zx::event GetReleaseFence() = 0;

  // Registers a callback invoked when the image backing this surface is
  // released.
  virtual void SetReleaseImageCallback(
      ReleaseImageCallback release_image_callback) = 0;

  // Advances this surface's age counter and returns the new value.
  // NOTE(review): "age" semantics (e.g. frames since last use) are defined by
  // implementations, not visible here.
  virtual size_t AdvanceAndGetAge() = 0;

  // Flushes pending acquire/release events; returns whether the flush
  // succeeded.
  virtual bool FlushSessionAcquireAndReleaseEvents() = 0;

  // Invokes |on_writes_committed| once pending writes to this surface have
  // finished.
  virtual void SignalWritesFinished(
      const std::function<void(void)>& on_writes_committed) = 0;
};
// This represents an abstract notion of "surface producer", which serves as a
// source for `SurfaceProducerSurface`s. Produced surfaces should be returned
// to this `SurfaceProducer` via `SubmitSurfaces`, at which point they will be
// shared with Scenic.
class SurfaceProducer {
 public:
  virtual ~SurfaceProducer() = default;

  // The Skia GPU context backing surfaces produced by this producer.
  virtual GrDirectContext* gr_context() const = 0;

  // Creates a new surface of |size| for offscreen rendering.
  virtual std::unique_ptr<SurfaceProducerSurface> ProduceOffscreenSurface(
      const SkISize& size) = 0;

  // Creates (or recycles — implementation-defined) a surface of |size| for
  // onscreen presentation.
  virtual std::unique_ptr<SurfaceProducerSurface> ProduceSurface(
      const SkISize& size) = 0;

  // Returns produced surfaces to this producer, at which point they are
  // shared with Scenic.
  virtual void SubmitSurfaces(
      std::vector<std::unique_ptr<SurfaceProducerSurface>> surfaces) = 0;
};
} // namespace flutter_runner
#endif // FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_SURFACE_PRODUCER_H_
| engine/shell/platform/fuchsia/flutter/surface_producer.h/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/surface_producer.h",
"repo_id": "engine",
"token_count": 886
} | 384 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_TESTS_FAKES_TOUCH_SOURCE_H_
#define FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_TESTS_FAKES_TOUCH_SOURCE_H_
#include <fuchsia/ui/pointer/cpp/fidl.h>
#include <optional>
#include <vector>
#include "flutter/fml/logging.h"
namespace flutter_runner::testing {
// A test stub to act as the protocol server. A test can control what is sent
// back by this server implementation, via the ScheduleCallback call.
class FakeTouchSource : public fuchsia::ui::pointer::TouchSource {
 public:
  // |fuchsia.ui.pointer.TouchSource|
  //
  // Records the client's uploaded responses and parks the hanging-get
  // callback until ScheduleCallback() fires it.
  void Watch(std::vector<fuchsia::ui::pointer::TouchResponse> responses,
             TouchSource::WatchCallback callback) override {
    pending_responses_ = std::move(responses);
    watch_callback_ = std::move(callback);
  }

  // Fires the parked Watch() callback with |events|, simulating the server
  // issuing events to the client. Requires a prior Watch() call.
  void ScheduleCallback(std::vector<fuchsia::ui::pointer::TouchEvent> events) {
    FML_CHECK(watch_callback_) << "require a valid WatchCallback";
    watch_callback_(std::move(events));
  }

  // Hands the most recently uploaded responses to the test, clearing the
  // stored value so each upload is observed at most once.
  std::optional<std::vector<fuchsia::ui::pointer::TouchResponse>>
  UploadedResponses() {
    std::optional<std::vector<fuchsia::ui::pointer::TouchResponse>> taken;
    taken.swap(pending_responses_);
    return taken;
  }

 private:
  // |fuchsia.ui.pointer.TouchSource|
  //
  // Not exercised by these tests.
  void UpdateResponse(fuchsia::ui::pointer::TouchInteractionId ixn,
                      fuchsia::ui::pointer::TouchResponse response,
                      TouchSource::UpdateResponseCallback callback) override {
    FML_UNREACHABLE();
  }

  // Responses uploaded by the client on its latest Watch() call; nullopt once
  // consumed via UploadedResponses().
  std::optional<std::vector<fuchsia::ui::pointer::TouchResponse>>
      pending_responses_;

  // Client callback parked by Watch(); triggered via ScheduleCallback().
  TouchSource::WatchCallback watch_callback_;
};
} // namespace flutter_runner::testing
#endif // FLUTTER_SHELL_PLATFORM_FUCHSIA_FLUTTER_TESTS_FAKES_TOUCH_SOURCE_H_
| engine/shell/platform/fuchsia/flutter/tests/fakes/touch_source.h/0 | {
"file_path": "engine/shell/platform/fuchsia/flutter/tests/fakes/touch_source.h",
"repo_id": "engine",
"token_count": 730
} | 385 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.