未验证 提交 b3ebb6dd 编写于 作者: F Ferhat 提交者: GitHub

Implement ImageShader for html Canvas (#26384)

上级 80b3f95d
repository: https://github.com/flutter/goldens.git
revision: 4d00d1a0f1c0bc123814919a45ef2c2f57ed21ec
revision: bdf2f896cec4fa1589c277a5fbb2c73622924375
......@@ -4,7 +4,8 @@
part of engine;
/// Allocates and caches 0 or more canvas(s) for [BitmapCanvas].
/// Renders picture to a CanvasElement by allocating and caching 0 or more
/// canvas(s) for [BitmapCanvas].
///
/// [BitmapCanvas] signals allocation of first canvas using allocateCanvas.
/// When a painting command such as drawImage or drawParagraph requires
......@@ -912,15 +913,29 @@ class ContextStateHandle {
strokeJoin = paint.strokeJoin;
if (paint.shader != null) {
final EngineGradient engineShader = paint.shader as EngineGradient;
final Object paintStyle =
engineShader.createPaintStyle(_canvasPool.context, shaderBounds,
density);
fillStyle = paintStyle;
strokeStyle = paintStyle;
_shaderBounds = shaderBounds;
// Align pattern origin to destination.
context.translate(shaderBounds!.left, shaderBounds.top);
if (paint.shader is EngineGradient) {
final EngineGradient engineShader = paint.shader as EngineGradient;
final Object paintStyle =
engineShader.createPaintStyle(_canvasPool.context, shaderBounds,
density);
fillStyle = paintStyle;
strokeStyle = paintStyle;
_shaderBounds = shaderBounds;
// Align pattern origin to destination.
context.translate(shaderBounds!.left, shaderBounds.top);
} else if (paint.shader is EngineImageShader) {
final EngineImageShader imageShader = paint.shader as EngineImageShader;
final Object paintStyle =
imageShader.createPaintStyle(_canvasPool.context, shaderBounds,
density);
fillStyle = paintStyle;
strokeStyle = paintStyle;
if (imageShader.requiresTileOffset) {
_shaderBounds = shaderBounds;
// Align pattern origin to destination.
context.translate(shaderBounds!.left, shaderBounds.top);
}
}
} else if (paint.color != null) {
final String? colorString = colorToCssString(paint.color);
fillStyle = colorString;
......
......@@ -212,10 +212,20 @@ html.HtmlElement _buildDrawRectElement(
..width = '${right - left}px'
..height = '${bottom - top}px'
..backgroundColor = cssColor;
if (paint.shader != null && paint.shader is EngineImageShader) {
_applyImageShaderToElement(rectangle, paint.shader! as EngineImageShader);
}
}
return rectangle;
}
/// Applies an [EngineImageShader] to [targetElement] by setting its CSS
/// background image to the shader's source image.
void _applyImageShaderToElement(html.HtmlElement targetElement,
    EngineImageShader imageShader) {
  final HtmlImage image = imageShader.image;
  // The CSS `background-image` property takes a `url(...)` function, not a
  // bare URL. Assigning the raw `src` string produces an invalid declaration
  // that browsers silently drop, so the shader image would never render.
  targetElement.style.backgroundImage = "url('${image.imgElement.src}')";
}
void _applyRRectBorderRadius(html.CssStyleDeclaration style, ui.RRect rrect) {
if (rrect.tlRadiusX == rrect.trRadiusX &&
rrect.tlRadiusX == rrect.blRadiusX &&
......
......@@ -634,8 +634,7 @@ class SurfacePath implements ui.Path {
final ui.Offset lastPoint = pathRef.atPoint(pointCount - 1);
final double lastPointX = lastPoint.dx;
final double lastPointY = lastPoint.dy;
if (!SPath.nearlyEqual(px, lastPointX)
|| SPath.nearlyEqual(py, lastPointY)) {
if (!SPath.nearlyEqual(px, lastPointX) || !SPath.nearlyEqual(py, lastPointY)) {
lineTo(px, py);
}
}
......
......@@ -8,8 +8,8 @@ import 'dart:math' as math;
import 'package:ui/ui.dart' as ui;
import '../../util.dart';
import 'path_utils.dart';
import '../../util.dart';
/// Stores the path verbs, points and conic weights.
///
......@@ -21,7 +21,7 @@ import 'path_utils.dart';
/// to update caches due to content changes.
class PathRef {
PathRef()
: _fPoints = Float32List(kInitialPointsCapacity * 2),
: fPoints = Float32List(kInitialPointsCapacity * 2),
_fVerbs = Uint8List(kInitialVerbsCapacity) {
_fPointsCapacity = kInitialPointsCapacity;
_fVerbsCapacity = kInitialVerbsCapacity;
......@@ -55,7 +55,7 @@ class PathRef {
int _fPointsCapacity = 0;
int _fPointsLength = 0;
int _fVerbsCapacity = 0;
Float32List _fPoints;
Float32List fPoints;
Uint8List _fVerbs;
int _fVerbsLength = 0;
int _conicWeightsCapacity = 0;
......@@ -82,8 +82,8 @@ class PathRef {
void setPoint(int pointIndex, double x, double y) {
assert(pointIndex < _fPointsLength);
int index = pointIndex * 2;
_fPoints[index] = x;
_fPoints[index + 1] = y;
fPoints[index] = x;
fPoints[index + 1] = y;
}
/// Creates a copy of the path by pointing new path to a current
......@@ -91,7 +91,7 @@ class PathRef {
/// more verbs, this copy only returns path at the time of copy and shares
/// typed arrays of original path.
PathRef.shallowCopy(PathRef ref)
: _fPoints = ref._fPoints,
: fPoints = ref.fPoints,
_fVerbs = ref._fVerbs {
_fVerbsCapacity = ref._fVerbsCapacity;
_fVerbsLength = ref._fVerbsLength;
......@@ -117,7 +117,7 @@ class PathRef {
debugValidate();
}
Float32List get points => _fPoints;
Float32List get points => fPoints;
Float32List? get conicWeights => _conicWeights;
int countPoints() => _fPointsLength;
......@@ -130,12 +130,12 @@ class PathRef {
}
ui.Offset atPoint(int index) {
return ui.Offset(_fPoints[index * 2], _fPoints[index * 2 + 1]);
return ui.Offset(fPoints[index * 2], fPoints[index * 2 + 1]);
}
double pointXAt(int index) => _fPoints[index * 2];
double pointXAt(int index) => fPoints[index * 2];
double pointYAt(int index) => _fPoints[index * 2 + 1];
double pointYAt(int index) => fPoints[index * 2 + 1];
double atWeight(int index) {
return _conicWeights![index];
......@@ -227,10 +227,10 @@ class PathRef {
_fVerbs[1] != SPath.kLineVerb) {
return null;
}
final double x0 = _fPoints[0];
final double y0 = _fPoints[1];
final double x1 = _fPoints[2];
final double y1 = _fPoints[3];
final double x0 = fPoints[0];
final double y0 = fPoints[1];
final double x1 = fPoints[2];
final double y1 = fPoints[3];
if (y0 == y1 || x0 == x1) {
return ui.Rect.fromLTRB(x0, y0, x1, y1);
}
......@@ -324,7 +324,7 @@ class PathRef {
return false;
}
for (int i = 0, len = pointCount * 2; i < len; i++) {
if (_fPoints[i] != ref._fPoints[i]) {
if (fPoints[i] != ref.fPoints[i]) {
return false;
}
}
......@@ -383,7 +383,7 @@ class PathRef {
/// Returns a new path by translating [source] by [offsetX], [offsetY].
PathRef.shiftedFrom(PathRef source, double offsetX, double offsetY)
: _fPoints = _fPointsFromSource(source, offsetX, offsetY),
: fPoints = _fPointsFromSource(source, offsetX, offsetY),
_fVerbs = _fVerbsFromSource(source) {
_conicWeightsCapacity = source._conicWeightsCapacity;
_conicWeightsLength = source._conicWeightsLength;
......@@ -422,7 +422,7 @@ class PathRef {
additionalReservePoints);
js_util.callMethod(_fVerbs, 'set', [ref._fVerbs]);
js_util.callMethod(_fPoints, 'set', [ref._fPoints]);
js_util.callMethod(fPoints, 'set', [ref.fPoints]);
if (ref._conicWeights == null) {
_conicWeights = null;
} else {
......@@ -448,8 +448,8 @@ class PathRef {
if (newLength > _fPointsCapacity) {
_fPointsCapacity = newLength + 10;
Float32List newPoints = Float32List(_fPointsCapacity * 2);
js_util.callMethod(newPoints, 'set', <dynamic>[_fPoints]);
_fPoints = newPoints;
js_util.callMethod(newPoints, 'set', <dynamic>[fPoints]);
fPoints = newPoints;
}
_fPointsLength = newLength;
}
......@@ -486,7 +486,7 @@ class PathRef {
for (int source = pointCount * 2 - 1, dst = newPointCount * 2 - 1;
source >= 0;
source--, dst--) {
_fPoints[dst] = sourcePoints[source];
fPoints[dst] = sourcePoints[source];
}
final int verbCount = countVerbs();
final int newVerbCount = source.countVerbs();
......@@ -564,14 +564,14 @@ class PathRef {
fIsFinite = true;
} else {
double minX, maxX, minY, maxY;
minX = maxX = _fPoints[0];
minX = maxX = fPoints[0];
accum *= minX;
minY = maxY = _fPoints[1];
minY = maxY = fPoints[1];
accum *= minY;
for (int i = 2, len = 2 * pointCount; i < len; i += 2) {
final double x = _fPoints[i];
final double x = fPoints[i];
accum *= x;
final double y = _fPoints[i + 1];
final double y = fPoints[i + 1];
accum *= y;
minX = math.min(minX, x);
minY = math.min(minY, y);
......@@ -765,7 +765,7 @@ class PathRef {
if (numPts != 0) {
int curLength = countPoints();
_resizePoints(curLength + numPts);
_fPoints.setAll(curLength * 2, path._fPoints);
fPoints.setAll(curLength * 2, path.fPoints);
}
final int numConics = path.countWeights();
......@@ -814,7 +814,7 @@ class PathRef {
Float32List getPoints() {
debugValidate();
return _fPoints;
return fPoints;
}
static const int kMinSize = 256;
......@@ -864,8 +864,8 @@ class PathRef {
final double boundsRight = bounds.right;
final double boundsBottom = bounds.bottom;
for (int i = 0, len = _fPointsLength * 2; i < len; i += 2) {
final double pointX = _fPoints[i];
final double pointY = _fPoints[i + 1];
final double pointX = fPoints[i];
final double pointY = fPoints[i + 1];
double tolerance = 0.0001;
final bool pointIsFinite = pointX.isFinite && pointY.isFinite;
if (pointIsFinite &&
......@@ -897,10 +897,10 @@ class PathRef {
int findMaxY(int pointIndex, int count) {
assert(count > 0);
// move to y component.
double max = _fPoints[pointIndex * 2 + 1];
double max = fPoints[pointIndex * 2 + 1];
int firstIndex = pointIndex;
for (int i = 1; i < count; i++) {
double y = _fPoints[(pointIndex + i) * 2];
double y = fPoints[(pointIndex + i) * 2];
if (y > max) {
max = y;
firstIndex = pointIndex + i;
......@@ -921,8 +921,8 @@ class PathRef {
// we wrapped around, so abort
break;
}
if (_fPoints[index * 2] != _fPoints[i * 2] ||
_fPoints[index * 2 + 1] != _fPoints[i * 2 + 1]) {
if (fPoints[index * 2] != fPoints[i * 2] ||
fPoints[index * 2 + 1] != fPoints[i * 2 + 1]) {
// found a different point, success!
break;
}
......
......@@ -60,12 +60,12 @@ class SPath {
return x < 0 ? -1 : ((x > 0) ? 1 : 0);
}
// Snaps a value to zero if almost zero (within tolerance).
static double snapToZero(double value) => nearlyEqual(value, 0.0) ? 0.0 : value;
static bool nearlyEqual(double value1, double value2) =>
(value1 - value2).abs() < SPath.scalarNearlyZero;
// Snaps a value to zero if almost zero (within tolerance).
static double snapToZero(double value) => SPath.nearlyEqual(value, 0.0) ? 0.0 : value;
static bool isInteger(double value) => value.floor() == value;
}
......
......@@ -357,8 +357,6 @@ class RecordingCanvas {
void drawRect(ui.Rect rect, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
if (paint.shader != null) {
renderStrategy.hasArbitraryPaint = true;
}
......@@ -375,8 +373,6 @@ class RecordingCanvas {
void drawRRect(ui.RRect rrect, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
if (paint.shader != null || !rrect.webOnlyUniformRadii) {
renderStrategy.hasArbitraryPaint = true;
}
......@@ -393,8 +389,6 @@ class RecordingCanvas {
void drawDRRect(ui.RRect outer, ui.RRect inner, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
// Check the inner bounds are contained within the outer bounds
// see: https://cs.chromium.org/chromium/src/third_party/skia/src/core/SkCanvas.cpp?l=1787-1789
ui.Rect innerRect = inner.outerRect;
......@@ -453,8 +447,6 @@ class RecordingCanvas {
void drawOval(ui.Rect rect, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
renderStrategy.hasArbitraryPaint = true;
_didDraw = true;
final double paintSpread = _getPaintSpread(paint);
......@@ -469,8 +461,6 @@ class RecordingCanvas {
void drawCircle(ui.Offset c, double radius, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
renderStrategy.hasArbitraryPaint = true;
_didDraw = true;
final double paintSpread = _getPaintSpread(paint);
......@@ -488,8 +478,6 @@ class RecordingCanvas {
void drawPath(ui.Path path, SurfacePaint paint) {
assert(!_recordingEnded);
assert(paint.shader == null || paint.shader is! EngineImageShader,
'ImageShader not supported yet');
if (paint.shader == null) {
// For Rect/RoundedRect paths use drawRect/drawRRect code paths for
// DomCanvas optimization.
......
......@@ -101,51 +101,6 @@ abstract class GlRenderer {
/// This class gets instantiated on demand by Vertices constructor. For apps
/// that don't use Vertices WebGlRenderer will be removed from release binary.
class _WebGlRenderer implements GlRenderer {
/// Cached vertex shader reused by [drawVertices] and gradients.
static String? _textureVertexShader;
static void _setupVertexTransforms(
GlContext gl,
GlProgram glProgram,
double offsetX,
double offsetY,
double widthInPixels,
double heightInPixels,
Matrix4 transform) {
Object transformUniform =
gl.getUniformLocation(glProgram.program, 'u_ctransform');
Matrix4 transformAtOffset = transform.clone()
..translate(-offsetX, -offsetY);
gl.setUniformMatrix4fv(transformUniform, false, transformAtOffset.storage);
// Set uniform to scale 0..width/height pixels coordinates to -1..1
// clipspace range and flip the Y axis.
Object resolution = gl.getUniformLocation(glProgram.program, 'u_scale');
gl.setUniform4f(resolution, 2.0 / widthInPixels.toDouble(),
-2.0 / heightInPixels.toDouble(), 1, 1);
Object shift = gl.getUniformLocation(glProgram.program, 'u_shift');
gl.setUniform4f(shift, -1, 1, 0, 0);
}
static void _setupTextureScalar(
GlContext gl, GlProgram glProgram, double sx, double sy) {
Object scalar = gl.getUniformLocation(glProgram.program, 'u_texscale');
gl.setUniform2f(scalar, sx, sy);
}
static dynamic _tileModeToGlWrapping(GlContext gl, ui.TileMode tileMode) {
switch (tileMode) {
case ui.TileMode.clamp:
return gl.kClampToEdge;
case ui.TileMode.decal:
return gl.kClampToEdge;
case ui.TileMode.mirror:
return gl.kMirroredRepeat;
case ui.TileMode.repeated:
return gl.kRepeat;
}
}
@override
void drawVertices(
html.CanvasRenderingContext2D? context,
......@@ -193,10 +148,10 @@ class _WebGlRenderer implements GlRenderer {
final String vertexShader = imageShader == null
? VertexShaders.writeBaseVertexShader()
: writeTextureVertexShader();
: VertexShaders.writeTextureVertexShader();
final String fragmentShader = imageShader == null
? _writeVerticesFragmentShader()
: _writeVerticesTextureFragmentShader(
: FragmentShaders.writeTextureFragmentShader(
isWebGl2, imageShader.tileModeX, imageShader.tileModeY);
GlContext gl =
......@@ -208,15 +163,17 @@ class _WebGlRenderer implements GlRenderer {
Object? positionAttributeLocation =
gl.getAttributeLocation(glProgram.program, 'position');
_setupVertexTransforms(gl, glProgram, offsetX, offsetY,
setupVertexTransforms(gl, glProgram, offsetX, offsetY,
widthInPixels.toDouble(), heightInPixels.toDouble(), transform);
if (imageShader != null) {
/// To map from vertex position to texture coordinate in 0..1 range,
/// we setup scalar to be used in vertex shader.
_setupTextureScalar(
setupTextureTransform(
gl,
glProgram,
0.0,
0.0,
1.0 / imageShader.image.width.toDouble(),
1.0 / imageShader.image.height.toDouble());
}
......@@ -240,7 +197,8 @@ class _WebGlRenderer implements GlRenderer {
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind buffer as position buffer and transfer data.
gl.bindArrayBuffer(positionsBuffer);
gl.bufferData(positions, gl.kStaticDraw);
bufferVertexData(gl, positions, 1.0);
// Setup data format for attribute.
js_util.callMethod(gl.glContext, 'vertexAttribPointer', <dynamic>[
positionAttributeLocation,
......@@ -293,10 +251,10 @@ class _WebGlRenderer implements GlRenderer {
// Texture REPEAT and MIRROR is only supported in WebGL 2, for
// WebGL 1.0 we let shader compute correct uv coordinates.
gl.texParameteri(gl.kTexture2D, gl.kTextureWrapS,
_tileModeToGlWrapping(gl, imageShader.tileModeX));
tileModeToGlWrapping(gl, imageShader.tileModeX));
gl.texParameteri(gl.kTexture2D, gl.kTextureWrapT,
_tileModeToGlWrapping(gl, imageShader.tileModeY));
tileModeToGlWrapping(gl, imageShader.tileModeY));
// Mipmapping saves your texture in different resolutions
// so the graphics card can choose which resolution is optimal
......@@ -339,9 +297,6 @@ class _WebGlRenderer implements GlRenderer {
context.restore();
}
static final Uint16List _vertexIndicesForRect =
Uint16List.fromList(<int>[0, 1, 2, 2, 3, 0]);
/// Renders a rectangle using given program into an image resource.
///
/// Browsers that support OffscreenCanvas and the transferToImageBitmap api
......@@ -434,7 +389,7 @@ class _WebGlRenderer implements GlRenderer {
Object? indexBuffer = gl.createBuffer();
gl.bindElementArrayBuffer(indexBuffer);
gl.bufferElementData(_vertexIndicesForRect, gl.kStaticDraw);
gl.bufferElementData(VertexShaders.vertexIndicesForRect, gl.kStaticDraw);
if (gl.containsUniform(glProgram.program, 'u_resolution')) {
Object uRes = gl.getUniformLocation(glProgram.program, 'u_resolution');
......@@ -446,26 +401,7 @@ class _WebGlRenderer implements GlRenderer {
gl.viewport(0, 0, widthInPixels.toDouble(), heightInPixels.toDouble());
gl.drawElements(
gl.kTriangles, _vertexIndicesForRect.length, gl.kUnsignedShort);
}
static String writeTextureVertexShader() {
if (_textureVertexShader == null) {
ShaderBuilder builder = ShaderBuilder(webGLVersion);
builder.addIn(ShaderType.kVec4, name: 'position');
builder.addUniform(ShaderType.kMat4, name: 'u_ctransform');
builder.addUniform(ShaderType.kVec4, name: 'u_scale');
builder.addUniform(ShaderType.kVec2, name: 'u_texscale');
builder.addUniform(ShaderType.kVec4, name: 'u_shift');
builder.addOut(ShaderType.kVec2, name: 'v_texcoord');
ShaderMethod method = builder.addMethod('main');
method.addStatement(
'gl_Position = ((u_ctransform * position) * u_scale) + u_shift;');
method.addStatement('v_texcoord = vec2(position.x * u_texscale.x, '
'(position.y * u_texscale.y));');
_textureVertexShader = builder.build();
}
return _textureVertexShader!;
gl.kTriangles, VertexShaders.vertexIndicesForRect.length, gl.kUnsignedShort);
}
/// This fragment shader enables Int32List of colors to be passed directly
......@@ -486,34 +422,6 @@ class _WebGlRenderer implements GlRenderer {
return builder.build();
}
String _writeVerticesTextureFragmentShader(
bool isWebGl2, ui.TileMode? tileModeX, ui.TileMode? tileModeY) {
ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
builder.floatPrecision = ShaderPrecision.kMedium;
builder.addIn(ShaderType.kVec2, name: 'v_texcoord');
builder.addUniform(ShaderType.kSampler2D, name: 'u_texture');
ShaderMethod method = builder.addMethod('main');
if (isWebGl2 ||
tileModeX == null ||
tileModeY == null ||
(tileModeX == ui.TileMode.clamp && tileModeY == ui.TileMode.clamp)) {
method.addStatement('${builder.fragmentColor.name} = '
'${builder.texture2DFunction}(u_texture, v_texcoord);');
} else {
// Repeat and mirror are not supported for webgl1. Write code to
// adjust texture coordinate.
//
// This will write u and v floats, clamp/repeat and mirror the value and
// pass it to sampler.
method.addTileStatements('v_texcoord.x', 'u', tileModeX);
method.addTileStatements('v_texcoord.y', 'v', tileModeY);
method.addStatement('vec2 uv = vec2(u, v);');
method.addStatement('${builder.fragmentColor.name} = '
'${builder.texture2DFunction}(u_texture, uv);');
}
return builder.build();
}
@override
void drawHairline(
html.CanvasRenderingContext2D? _ctx, Float32List positions) {
......@@ -589,7 +497,7 @@ ui.Rect _transformBounds(
math.max(y0, math.max(y1, math.max(y2, y3))));
}
// Converts from [VertexMode] triangleFan and triangleStrip to triangles.
/// Converts from [VertexMode] triangleFan and triangleStrip to triangles.
Float32List convertVertexPositions(ui.VertexMode mode, Float32List positions) {
assert(mode != ui.VertexMode.triangles);
if (mode == ui.VertexMode.triangleFan) {
......
......@@ -2,20 +2,267 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:html' as html;
import 'dart:js_util' as js_util;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import '../offscreen_canvas.dart';
import '../render_vertices.dart';
import '../../browser_detection.dart';
import '../../html_image_codec.dart';
import '../../vector_math.dart';
import 'vertex_shaders.dart';
import 'webgl_context.dart';
class EngineImageShader implements ui.ImageShader {
EngineImageShader(ui.Image image, this.tileModeX, this.tileModeY,
this.matrix4, this.filterQuality)
: this.image = image as HtmlImage;
Float64List matrix4, this.filterQuality)
: this.image = image as HtmlImage,
this.matrix4 = Float32List.fromList(matrix4);
final ui.TileMode tileModeX;
final ui.TileMode tileModeY;
final Float64List matrix4;
final Float32List matrix4;
final ui.FilterQuality? filterQuality;
final HtmlImage image;
/// Whether fill pattern requires transform to shift tiling offset.
bool requiresTileOffset = false;
/// Creates a Canvas2D paint style (a `CanvasPattern`) for this image shader.
///
/// When both axes tile (repeat or mirror), the pattern is expressed directly
/// via the Canvas2D `createPattern` API — mirroring is pre-baked into the
/// pattern source by [_resolveTiledImageSource]. If either axis clamps,
/// Canvas2D patterns cannot express it, so the image is rendered through
/// WebGL into a bitmap first via [_createGlShader].
///
/// [shaderBounds] is the destination rectangle being filled; it must be
/// non-null when the WebGL path is taken (note the `shaderBounds!` below).
/// [density] is forwarded to the WebGL path.
Object createPaintStyle(html.CanvasRenderingContext2D context,
    ui.Rect? shaderBounds, double density) {
  /// Creates a canvas rendering context pattern based on image and tile modes.
  final ui.TileMode tileX = tileModeX;
  final ui.TileMode tileY = tileModeY;
  if (tileX != ui.TileMode.clamp && tileY != ui.TileMode.clamp) {
    // Neither axis clamps: Canvas2D repeat semantics are sufficient.
    return context.createPattern(
        _resolveTiledImageSource(image, tileX, tileY)!,
        _tileModeToHtmlRepeatAttribute(tileX, tileY))!;
  } else {
    // At least one axis clamps: fall back to WebGL-based tiling.
    initWebGl();
    return _createGlShader(context, shaderBounds!, density);
  }
}
/// Maps a pair of tile modes to the CSS/Canvas2D repeat keyword.
///
/// CSS and the Canvas2D `createPattern` API only understand plain
/// repetition. Mirrored tiling is handled upstream by baking the mirror
/// into the pattern source image, so `mirror` counts as a repeating axis
/// here as well.
String _tileModeToHtmlRepeatAttribute(
    ui.TileMode tileModeX, ui.TileMode tileModeY) {
  bool tiles(ui.TileMode mode) =>
      mode == ui.TileMode.repeated || mode == ui.TileMode.mirror;
  if (tiles(tileModeX)) {
    return tiles(tileModeY) ? 'repeat' : 'repeat-x';
  }
  return tiles(tileModeY) ? 'repeat-y' : 'no-repeat';
}
/// Tiles the image and returns an image or canvas element to be used as
/// source for a repeated pattern.
///
/// For mirrored tile modes the image is redrawn flipped 2x (one axis) or
/// 4x (both axes) into an offscreen surface, so plain `repeat` tiling of
/// the result yields a mirrored pattern.
///
/// Other alternative was to create a webgl shader for the area and
/// tile in the shader, but that will generate a much larger image footprint
/// when the pattern is small. So we opt here for mirroring by
/// redrawing the image 2 or 4 times into a new bitmap.
Object? _resolveTiledImageSource(
    HtmlImage image, ui.TileMode tileX, ui.TileMode tileY) {
  final int mirrorX = tileX == ui.TileMode.mirror ? 2 : 1;
  final int mirrorY = tileY == ui.TileMode.mirror ? 2 : 1;
  /// If we have no mirror, we can use image directly as pattern.
  if (mirrorX == 1 && mirrorY == 1) {
    return image.imgElement;
  }
  /// Create a new image by mirroring.
  final int imageWidth = image.width;
  final int imageHeight = image.height;
  final int newWidth = imageWidth * mirrorX;
  final int newHeight = imageHeight * mirrorY;
  OffScreenCanvas offscreenCanvas = OffScreenCanvas(newWidth, newHeight);
  // Context is kept as Object and driven through js_util so the same code
  // works for both OffscreenCanvasRenderingContext2D and 2D canvas contexts.
  Object renderContext = offscreenCanvas.getContext2d()!;
  for (int y = 0; y < mirrorY; y++) {
    for (int x = 0; x < mirrorX; x++) {
      // Cells other than the top-left one are drawn flipped on that axis.
      int flipX = x != 0 ? -1 : 1;
      int flipY = y != 0 ? -1 : 1;
      /// To draw image flipped we set translate and scale and pass
      /// negative width/height to drawImage.
      if (flipX != 1 || flipY != 1) {
        js_util.callMethod(renderContext, 'scale', <dynamic>[flipX, flipY]);
      }
      // Under a negative scale, the destination cell [size, 2*size) maps to
      // coordinates [-2*size, -size) — hence the -2 * size draw origin.
      js_util.callMethod(renderContext, 'drawImage', <dynamic>[
        image.imgElement,
        x == 0 ? 0 : -2 * imageWidth,
        y == 0 ? 0 : -2 * imageHeight
      ]);
      if (flipX != 1 || flipY != 1) {
        /// Restore transform. This is faster than save/restore on context.
        js_util.callMethod(renderContext, 'scale', <dynamic>[flipX, flipY]);
      }
    }
  }
  // When using OffscreenCanvas and transferToImageBitmap is supported by
  // browser create ImageBitmap otherwise use more expensive canvas
  // allocation.
  if (OffScreenCanvas.supported &&
      offscreenCanvas.transferToImageBitmapSupported) {
    return offscreenCanvas.transferToImageBitmap();
  } else {
    html.CanvasElement canvas =
        html.CanvasElement(width: newWidth, height: newHeight);
    final html.CanvasRenderingContext2D ctx = canvas.context2D;
    offscreenCanvas.transferImage(ctx);
    return canvas;
  }
}
/// Creates an image with tiled/transformed images.
///
/// Renders this shader's image through WebGL into an offscreen bitmap sized
/// to [shaderBounds] (in device pixels) and wraps the result in a
/// non-repeating `CanvasPattern`. This path is used when at least one tile
/// mode clamps, which the plain Canvas2D pattern API cannot express.
html.CanvasPattern _createGlShader(html.CanvasRenderingContext2D? context,
    ui.Rect shaderBounds, double density) {
  final Matrix4 transform = Matrix4.fromFloat32List(matrix4);
  final double dpr = ui.window.devicePixelRatio;

  int widthInPixels = (shaderBounds.width * dpr).ceil();
  int heightInPixels = (shaderBounds.height * dpr).ceil();
  assert(widthInPixels > 0 && heightInPixels > 0);

  /// Render tiles into a bitmap and create a canvas pattern.
  final bool isWebGl2 = webGLVersion == WebGLVersion.webgl2;
  final String vertexShader = VertexShaders.writeTextureVertexShader();
  final String fragmentShader = FragmentShaders.writeTextureFragmentShader(
      isWebGl2, tileModeX, tileModeY);

  /// Render gradient into a bitmap and create a canvas pattern.
  OffScreenCanvas offScreenCanvas =
      OffScreenCanvas(widthInPixels, heightInPixels);
  GlContext gl = GlContext(offScreenCanvas);
  gl.setViewportSize(widthInPixels, heightInPixels);

  GlProgram glProgram = gl.cacheProgram(vertexShader, fragmentShader);
  gl.useProgram(glProgram);

  // Two triangles (6 vertices, 2 floats each) covering the destination
  // rect translated to the origin.
  const int vertexCount = 6;
  final Float32List vertices = Float32List(vertexCount * 2);
  ui.Rect vRect = shaderBounds.translate(-shaderBounds.left, -shaderBounds.top);
  vertices[0] = vRect.left;
  vertices[1] = vRect.top;
  vertices[2] = vRect.right;
  vertices[3] = vRect.top;
  vertices[4] = vRect.right;
  vertices[5] = vRect.bottom;
  vertices[6] = vRect.right;
  vertices[7] = vRect.bottom;
  vertices[8] = vRect.left;
  vertices[9] = vRect.bottom;
  vertices[10] = vRect.left;
  vertices[11] = vRect.top;

  Object? positionAttributeLocation =
      gl.getAttributeLocation(glProgram.program, 'position');

  setupVertexTransforms(gl, glProgram, 0, 0,
      widthInPixels.toDouble(), heightInPixels.toDouble(), transform);

  // Pattern origin is the canvas origin; if the destination rect is not
  // anchored at (0, 0), the caller must translate the context so the
  // pattern lines up (see ContextStateHandle).
  requiresTileOffset = shaderBounds.left !=0 || shaderBounds.top != 0;

  /// To map from vertex position to texture coordinate in 0..1 range,
  /// we setup scalar to be used in vertex shader.
  setupTextureTransform(
      gl,
      glProgram,
      shaderBounds.left,
      shaderBounds.top,
      1.0 / image.width.toDouble(),
      1.0 / image.height.toDouble());

  /// Setup geometry.
  ///
  /// Create buffer for vertex coordinates.
  Object positionsBuffer = gl.createBuffer()!;
  assert(positionsBuffer != null); // ignore: unnecessary_null_comparison

  Object? vao;
  if (isWebGl2) {
    /// Create a vertex array object.
    vao = gl.createVertexArray();

    /// Set vertex array object as active one.
    gl.bindVertexArray(vao!);
  }

  /// Turn on position attribute.
  gl.enableVertexAttribArray(positionAttributeLocation);

  /// Bind buffer as position buffer and transfer data.
  gl.bindArrayBuffer(positionsBuffer);
  bufferVertexData(gl, vertices, ui.window.devicePixelRatio);

  /// Setup data format for attribute.
  js_util.callMethod(gl.glContext, 'vertexAttribPointer', <dynamic>[
    positionAttributeLocation,
    2,
    gl.kFloat,
    false,
    0,
    0,
  ]);

  /// Copy image to the texture.
  Object? texture = gl.createTexture();

  /// Texture units are a global array of references to the textures.
  /// By setting activeTexture, we associate the bound texture to a unit.
  /// Every time we call a texture function such as texImage2D with a target
  /// like TEXTURE_2D, it looks up texture by using the currently active
  /// unit.
  /// In our case we have a single texture unit 0.
  gl.activeTexture(gl.kTexture0);
  gl.bindTexture(gl.kTexture2D, texture);
  gl.texImage2D(gl.kTexture2D, 0, gl.kRGBA, gl.kRGBA, gl.kUnsignedByte,
      image.imgElement);

  if (isWebGl2) {
    /// Texture REPEAT and MIRROR is only supported in WebGL 2, for
    /// WebGL 1.0 we let shader compute correct uv coordinates.
    gl.texParameteri(gl.kTexture2D, gl.kTextureWrapS,
        tileModeToGlWrapping(gl, tileModeX));

    gl.texParameteri(gl.kTexture2D, gl.kTextureWrapT,
        tileModeToGlWrapping(gl, tileModeY));

    /// Mipmapping saves your texture in different resolutions
    /// so the graphics card can choose which resolution is optimal
    /// without artifacts.
    gl.generateMipmap(gl.kTexture2D);
  } else {
    /// For webgl1, if a texture is not mipmap complete, then the return
    /// value of a texel fetch is (0, 0, 0, 1), so we have to set
    /// minifying function to filter.
    /// See https://www.khronos.org/registry/webgl/specs/1.0.0/#5.13.8.
    gl.texParameteri(gl.kTexture2D, gl.kTextureWrapS, gl.kClampToEdge);
    gl.texParameteri(gl.kTexture2D, gl.kTextureWrapT, gl.kClampToEdge);
    gl.texParameteri(gl.kTexture2D, gl.kTextureMinFilter, gl.kLinear);
  }

  /// Finally render triangles.
  gl.clear();
  gl.drawTriangles(vertexCount, ui.VertexMode.triangles);
  if (vao != null) {
    gl.unbindVertexArray();
  }

  Object? bitmapImage = gl.readPatternData();
  gl.bindArrayBuffer(null);
  gl.bindElementArrayBuffer(null);
  return context!.createPattern(bitmapImage!, 'no-repeat')!;
}
}
......@@ -2,12 +2,21 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import 'shader_builder.dart';
import '../../browser_detection.dart';
/// Provides common shaders used for gradients and drawVertices APIs.
class VertexShaders {
static final Uint16List vertexIndicesForRect =
Uint16List.fromList(<int>[0, 1, 2, 2, 3, 0]);
/// Cached vertex shaders.
static String? _baseVertexShader;
static String? _textureVertexShader;
/// Creates a vertex shader transforms pixel space [Vertices.positions] to
/// final clipSpace -1..1 coordinates with inverted Y Axis.
......@@ -39,4 +48,53 @@ class VertexShaders {
}
return _baseVertexShader!;
}
/// Returns the vertex shader used for texture sampling, compiling and
/// caching it on first use.
///
/// The shader transforms pixel-space positions through `u_ctransform` into
/// clip space (via `u_scale`/`u_shift`) and derives `v_texcoord` from the
/// position using the offset/scale packed into `u_textransform`.
static String writeTextureVertexShader() {
  if (_textureVertexShader == null) {
    final ShaderBuilder builder = ShaderBuilder(webGLVersion);
    builder
      ..addIn(ShaderType.kVec4, name: 'position')
      ..addUniform(ShaderType.kMat4, name: 'u_ctransform')
      ..addUniform(ShaderType.kVec4, name: 'u_scale')
      ..addUniform(ShaderType.kVec4, name: 'u_textransform')
      ..addUniform(ShaderType.kVec4, name: 'u_shift')
      ..addOut(ShaderType.kVec2, name: 'v_texcoord');
    final ShaderMethod mainMethod = builder.addMethod('main');
    mainMethod
      ..addStatement(
          'gl_Position = ((u_ctransform * position) * u_scale) + u_shift;')
      ..addStatement('v_texcoord = vec2((u_textransform.z + position.x) * u_textransform.x, '
          '((u_textransform.w + position.y) * u_textransform.y));');
    _textureVertexShader = builder.build();
  }
  return _textureVertexShader!;
}
}
/// Provides fragment shaders used for texture sampling.
class FragmentShaders {
  /// Builds a fragment shader that samples `u_texture` at `v_texcoord`.
  ///
  /// On WebGL 1 with repeat/mirror tile modes, the shader rewrites the
  /// texture coordinates itself, since those wrap modes are only natively
  /// supported by WebGL 2 texture parameters.
  static String writeTextureFragmentShader(
      bool isWebGl2, ui.TileMode? tileModeX, ui.TileMode? tileModeY) {
    final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion);
    builder.floatPrecision = ShaderPrecision.kMedium;
    builder.addIn(ShaderType.kVec2, name: 'v_texcoord');
    builder.addUniform(ShaderType.kSampler2D, name: 'u_texture');
    final ShaderMethod method = builder.addMethod('main');
    // The texture can be sampled directly when hardware wrapping applies
    // (WebGL 2), when no wrap adjustment was requested, or when both axes
    // clamp (the sampler's default behavior).
    final bool sampleDirectly = isWebGl2 ||
        tileModeX == null ||
        tileModeY == null ||
        (tileModeX == ui.TileMode.clamp && tileModeY == ui.TileMode.clamp);
    if (sampleDirectly) {
      method.addStatement('${builder.fragmentColor.name} = '
          '${builder.texture2DFunction}(u_texture, v_texcoord);');
    } else {
      // Repeat and mirror are not supported for webgl1. Write code to
      // adjust texture coordinate.
      //
      // This will write u and v floats, clamp/repeat and mirror the value and
      // pass it to sampler.
      method.addTileStatements('v_texcoord.x', 'u', tileModeX);
      method.addTileStatements('v_texcoord.y', 'v', tileModeY);
      method.addStatement('vec2 uv = vec2(u, v);');
      method.addStatement('${builder.fragmentColor.name} = '
          '${builder.texture2DFunction}(u_texture, uv);');
    }
    return builder.build();
  }
}
......@@ -11,6 +11,7 @@ import 'package:ui/ui.dart' as ui;
import '../offscreen_canvas.dart';
import '../../browser_detection.dart';
import '../../vector_math.dart';
/// Compiled and cached gl program.
class GlProgram {
......@@ -486,3 +487,59 @@ class GlContextCache {
return _cachedContext;
}
}
/// Configures the vertex-stage transform uniforms on [glProgram].
///
/// `u_ctransform` carries [transform] pre-translated by
/// (-[offsetX], -[offsetY]) so geometry is expressed relative to the render
/// target origin. `u_scale` and `u_shift` then map 0..width/height pixel
/// coordinates into the -1..1 clip-space range with the Y axis flipped.
void setupVertexTransforms(
    GlContext gl,
    GlProgram glProgram,
    double offsetX,
    double offsetY,
    double widthInPixels,
    double heightInPixels,
    Matrix4 transform) {
  final Object transformUniform =
      gl.getUniformLocation(glProgram.program, 'u_ctransform');
  final Matrix4 offsetTransform = transform.clone()
    ..translate(-offsetX, -offsetY);
  gl.setUniformMatrix4fv(transformUniform, false, offsetTransform.storage);

  // Scale 0..width/height pixel coordinates to the -1..1 clip-space range
  // and flip the Y axis.
  final Object scaleUniform =
      gl.getUniformLocation(glProgram.program, 'u_scale');
  gl.setUniform4f(scaleUniform, 2.0 / widthInPixels.toDouble(),
      -2.0 / heightInPixels.toDouble(), 1, 1);
  final Object shiftUniform =
      gl.getUniformLocation(glProgram.program, 'u_shift');
  gl.setUniform4f(shiftUniform, -1, 1, 0, 0);
}
/// Uploads the `u_textransform` uniform used by the texture vertex shader.
///
/// The x/y components carry the scale ([sx], [sy]) and the z/w components
/// carry the texture-space offset ([offsetx], [offsety]).
void setupTextureTransform(
    GlContext gl, GlProgram glProgram, double offsetx, double offsety, double sx, double sy) {
  final Object textureTransform =
      gl.getUniformLocation(glProgram.program, 'u_textransform');
  gl.setUniform4f(textureTransform, sx, sy, offsetx, offsety);
}
/// Uploads [positions] into the currently bound WebGL buffer, scaling every
/// coordinate by [devicePixelRatio] when it differs from 1.0.
void bufferVertexData(GlContext gl, Float32List positions,
    double devicePixelRatio) {
  if (devicePixelRatio == 1.0) {
    // Fast path: no scaling needed, upload the list as-is.
    gl.bufferData(positions, gl.kStaticDraw);
    return;
  }
  final Float32List scaled = Float32List(positions.length);
  for (int i = 0; i < scaled.length; i++) {
    scaled[i] = positions[i] * devicePixelRatio;
  }
  gl.bufferData(scaled, gl.kStaticDraw);
}
/// Maps a [ui.TileMode] to the matching WebGL texture-wrap constant.
///
/// [ui.TileMode.decal] has no WebGL sampler equivalent, so it falls back to
/// clamp-to-edge, the same as [ui.TileMode.clamp].
dynamic tileModeToGlWrapping(GlContext gl, ui.TileMode tileMode) {
  switch (tileMode) {
    case ui.TileMode.clamp:
    case ui.TileMode.decal:
      return gl.kClampToEdge;
    case ui.TileMode.mirror:
      return gl.kMirroredRepeat;
    case ui.TileMode.repeated:
      return gl.kRepeat;
  }
}
\ No newline at end of file
......@@ -5,6 +5,7 @@
import 'dart:typed_data';
import 'package:ui/src/engine.dart';
import 'package:ui/src/engine/html/path/path_utils.dart';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
......
......@@ -8,6 +8,7 @@ import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' hide window;
import 'package:ui/src/engine.dart';
import 'package:ui/src/engine/html/path/path_utils.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
......
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:html' as html;
import 'dart:js_util' as js_util;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' hide TextStyle;
import 'package:ui/src/engine.dart';
import 'screenshot.dart';
// Bootstraps the browser test harness and hands it [testMain] as the
// suite entry point.
void main() {
  internalBootstrapBrowserTest(() => testMain);
}
/// Screenshot tests covering [ImageShader] rendering on the html backend for
/// each combination of horizontal/vertical [TileMode].
void testMain() async {
  const double screenWidth = 400.0;
  const double screenHeight = 400.0;
  const Rect screenRect = Rect.fromLTWH(0, 0, screenWidth, screenHeight);
  final HtmlImage testImage = createTestImage();

  setUp(() async {
    debugEmulateFlutterTesterEnvironment = true;
    await webOnlyInitializePlatform();
    webOnlyFontCollection.debugRegisterTestFonts();
    await webOnlyFontCollection.ensureFontsLoaded();
  });

  // Draws a sampling of primitives with [paint] so that a single golden
  // exercises the shader across rect, circle, oval, path, rrect and drrect
  // draw commands. [shaderRect] positions the first shape; subsequent shapes
  // are laid out by translating it.
  void drawShapes(RecordingCanvas rc, SurfacePaint paint, Rect shaderRect) {
    /// Rect.
    rc.drawRect(shaderRect, paint);
    shaderRect = shaderRect.translate(100, 0);

    /// Circle.
    rc.drawCircle(shaderRect.center, shaderRect.width / 2, paint);
    shaderRect = shaderRect.translate(110, 0);

    /// Oval.
    rc.drawOval(
        Rect.fromLTWH(shaderRect.left, shaderRect.top, shaderRect.width,
            shaderRect.height / 2),
        paint);
    shaderRect = shaderRect.translate(-210, 120);

    /// Path.
    final Path path = Path()
      ..moveTo(shaderRect.center.dx, shaderRect.top)
      ..lineTo(shaderRect.right, shaderRect.bottom)
      ..lineTo(shaderRect.left, shaderRect.bottom)
      ..close();
    rc.drawPath(path, paint);
    shaderRect = shaderRect.translate(100, 0);

    /// RRect.
    rc.drawRRect(RRect.fromRectXY(shaderRect, 10, 20), paint);
    shaderRect = shaderRect.translate(110, 0);

    /// DRRect.
    rc.drawDRRect(RRect.fromRectXY(shaderRect, 20, 30),
        RRect.fromRectXY(shaderRect.deflate(24), 16, 24), paint);
    shaderRect = shaderRect.translate(-200, 120);
  }

  // Records shader-painted shapes for the given tile modes and compares the
  // rendering against the golden [fileName].
  Future<void> testImageShader(TileMode tmx, TileMode tmy, String fileName,
      {double maxDiffRatePercent = 0.0}) async {
    final RecordingCanvas rc =
        RecordingCanvas(const Rect.fromLTRB(0, 0, screenWidth, screenHeight));
    final Rect shaderRect = const Rect.fromLTRB(0, 0, 100, 100);
    final SurfacePaint paint = Paint() as SurfacePaint;
    paint.shader = ImageShader(
        testImage, tmx, tmy, Matrix4.identity().toFloat64(),
        filterQuality: FilterQuality.high);
    drawShapes(rc, paint, shaderRect);
    // ImageShader forces the arbitrary-paint (bitmap canvas) strategy.
    expect(rc.renderStrategy.hasArbitraryPaint, isTrue);
    await canvasScreenshot(rc, fileName,
        region: screenRect, maxDiffRatePercent: maxDiffRatePercent);
  }

  test('Should draw with tiled imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.repeated, 'image_shader_tiled',
        maxDiffRatePercent: 5.0);
  });

  test('Should draw with horizontally mirrored imageshader.', () async {
    await testImageShader(
        TileMode.mirror, TileMode.repeated, 'image_shader_horiz_mirror',
        maxDiffRatePercent: 6.0);
  });

  test('Should draw with vertically mirrored imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.mirror, 'image_shader_vert_mirror',
        maxDiffRatePercent: 5.0);
  });

  test('Should draw with mirrored imageshader.', () async {
    await testImageShader(
        TileMode.mirror, TileMode.mirror, 'image_shader_mirror',
        maxDiffRatePercent: 6.0);
  });

  test('Should draw with horizontal clamp imageshader.', () async {
    await testImageShader(
        TileMode.clamp, TileMode.repeated, 'image_shader_clamp_horiz',
        maxDiffRatePercent: 13.0);
  });

  test('Should draw with vertical clamp imageshader.', () async {
    await testImageShader(
        TileMode.repeated, TileMode.clamp, 'image_shader_clamp_vertical',
        maxDiffRatePercent: 1.0);
  });

  test('Should draw with clamp imageshader.', () async {
    await testImageShader(
        TileMode.clamp, TileMode.clamp, 'image_shader_clamp',
        maxDiffRatePercent: 1.0);
  });
}
/// Creates a 16x16 test image with three colored quadrants (red, green,
/// blue) and one untouched — therefore transparent — quadrant, so tiling
/// and mirroring artifacts are clearly visible in goldens.
HtmlImage createTestImage() {
  const int width = 16;
  const int width2 = width ~/ 2;
  const int height = 16;
  final html.CanvasElement canvas =
      html.CanvasElement(width: width, height: height);
  final html.CanvasRenderingContext2D ctx = canvas.context2D;
  // fillRect paints immediately; the previous fill() calls operated on the
  // current (empty) path and were no-ops, so they have been removed.
  ctx.fillStyle = '#E04040';
  ctx.fillRect(0, 0, width2, width2);
  ctx.fillStyle = '#40E080';
  ctx.fillRect(width2, 0, width2, width2);
  ctx.fillStyle = '#2040E0';
  ctx.fillRect(width2, width2, width2, width2);
  // Transfer the canvas pixels into an ImageElement via a data URL so the
  // result can be wrapped in an HtmlImage.
  final html.ImageElement imageElement = html.ImageElement();
  imageElement.src = js_util.callMethod(canvas, 'toDataURL', <dynamic>[]);
  return HtmlImage(imageElement, width, height);
}
......@@ -10,6 +10,8 @@ import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' hide window;
import 'package:ui/src/engine.dart';
import 'package:ui/src/engine/html/path/path_ref.dart';
import 'package:ui/src/engine/html/path/path_utils.dart';
import 'matchers.dart';
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册