提交 94b377a5 编写于 作者: A Adam Barth 提交者: GitHub

Remove the specs directory (#2908)

These documents either got moved to the web site (e.g., gestures and
style-guide), became reality (e.g., accessibility and pointer), are outdated
(e.g., events) or are captured in bugs (e.g., keyboard and linkability).
上级 bebf0237
This file contains documentation for what we hope Sky to support in due course.
It's mostly proposals. It's not intended to be comprehensive.
Accessibility
=============
iOS and Android accessibility APIs want to synchronously communicate
with the main thread, so we need to ship a description of the UI to a
service on the main thread, from our Dart thread.
On the Dart side, we need a RenderAccessibleBox on the render tree,
which can be configured with all the various things the accessibility
APIs might care about (e.g. accessible name, checkedness, etc).
Separate from the layout and paint phases we have a phase that builds
an accessibility tree and sends it over to the aforementioned thread.
When a RenderAccessibleBox's configuration changes, it updates the
main thread also.
Maybe RenderParagraph also participates in this so that all text is
automatically exposed.
The main thread can send events like "activate" or "increment slider"
back to a RenderAccessibleBox, which should expose them somehow.
On the Widget side we'd have an Accessible widget that is a
OneChildRenderNodeWrapper for RenderAccessibleBox and does what you'd
expect, maybe also exposing the callbacks.
Components would wrap their interactive parts with these Accessible
widgets.
Ideally we'd have a way to make the default "activate" action
automatically turn into a "tap" gesture.
Debug Mode
==========
Sky can be switched into debug mode, in which:
- documented asserts are checked and logged to the console
- if you miss frame deadlines, your screen flashes (in addition to
the exceptions throwing)
- the dartanalyzer is run on all files and output logged to the
console
- code is run in checked mode
Design Principles
=================
See [https://flutter.io/design-principles/].
Sky Event Model
===============
```dart
import 'dart:collection';
import 'dart:async';
/// An immutable pairing of a thrown object with the stack trace that was
/// captured alongside it when it was caught.
class ExceptionAndStackTrace<T> {
  /// Creates a pair from a caught [exception] and its [stackTrace].
  const ExceptionAndStackTrace(this.exception, this.stackTrace);

  /// The object that was thrown.
  final T exception;

  /// The stack trace captured at the throw site, if one was available.
  final StackTrace stackTrace;
}
/// An [Exception] that aggregates several exceptions (each paired with its
/// stack trace) so they can be rethrown as a single unit.
///
/// Iterating yields the accumulated [ExceptionAndStackTrace] entries in the
/// order they were added.
class ExceptionListException<T> extends IterableMixin<ExceptionAndStackTrace<T>> implements Exception {
  // Lazily allocated; stays null until the first exception is added.
  List<ExceptionAndStackTrace<T>> _exceptions;

  /// Records [exception] (and, optionally, its [stackTrace]).
  void add(T exception, [StackTrace stackTrace = null]) {
    if (_exceptions == null)
      _exceptions = new List<ExceptionAndStackTrace<T>>();
    _exceptions.add(new ExceptionAndStackTrace<T>(exception, stackTrace));
  }

  /// The number of exceptions recorded so far.
  int get length => _exceptions == null ? 0 : _exceptions.length;

  /// Iterates the recorded exceptions.
  ///
  /// Fix: the original dereferenced [_exceptions] unconditionally, so
  /// iterating an exception list that never had anything added threw a
  /// null error; an empty list now yields an empty iterator, matching
  /// the null-tolerant behaviour of [length].
  Iterator<ExceptionAndStackTrace<T>> get iterator =>
      _exceptions == null
          ? <ExceptionAndStackTrace<T>>[].iterator
          : _exceptions.iterator;
}
/// Signature for a predicate over values of type [T].
typedef bool Filter<T>(T t);
/// Signature for a callback that consumes a value of type [T].
typedef void Handler<T>(T t);
/// Owns a [Dispatcher] and is the only object that can inject data into it.
///
/// This mirrors the StreamController/Stream split in dart:async: the event
/// source keeps the controller private and hands out only [dispatcher], so
/// consumers can listen but never emit.
class DispatcherController<T> {
DispatcherController() : dispatcher = new Dispatcher<T>();
/// The listen-only side, to be exposed to consumers.
final Dispatcher<T> dispatcher;
/// Delivers [data] to every listener registered on [dispatcher].
void add(T data) => dispatcher._add(data);
}
/// A multicast event distributor, loosely analogous to a broadcast Stream.
///
/// Listeners are registered with [listen] and removed with [unlisten].
/// Data is injected via the library-private [_add], which the owning
/// [DispatcherController] calls. Each handler is bound to the zone that was
/// current when it was registered (via Zone.bindUnaryCallback), so callbacks
/// run in their registration zone.
class Dispatcher<T> {
// Each entry pairs the raw handler (used as the identity key by
// [unlisten]) with its zone-bound wrapper (the callable actually invoked).
// NOTE(review): `Pair` is not defined in this file; it appears to be a
// simple 2-tuple with fields `a` and `b` — confirm its definition.
List<Pair<Handler, ZoneUnaryCallback>> _listeners;
/// Registers [handler] to receive every value subsequently dispatched.
void listen(Handler<T> handler) {
// you should not throw out of this handler
if (_listeners == null)
_listeners = new List<Pair<Handler, ZoneUnaryCallback>>();
_listeners.add(new Pair<Handler, ZoneUnaryCallback>(handler, Zone.current.bindUnaryCallback(handler)));
}
/// Removes the most recently added registration of [handler].
///
/// Returns true if a registration was found and removed, false otherwise.
bool unlisten(Handler<T> handler) {
if (_listeners == null)
return false;
var target = _listeners.lastWhere((v) => v.a == handler, orElse: () => null);
if (target == null)
return false;
_listeners.removeAt(_listeners.lastIndexOf(target));
return true;
}
/// Invokes every registered handler with [data].
///
/// Exceptions thrown by handlers are collected and rethrown together as a
/// single [ExceptionListException] after all handlers have run, so one
/// failing listener does not starve the others.
void _add(T data) {
if (_listeners == null)
return;
ExceptionListException exceptions = new ExceptionListException();
// we make a copy of the list here so that the listeners can
// mutate our list without worry
_listeners.toList().forEach((Pair<Handler, ZoneUnaryCallback> item) {
try {
item.b(data);
} catch (exception, stackTrace) {
exceptions.add(exception, stackTrace);
}
});
if (exceptions.length > 0)
throw exceptions;
}
/// Returns a dispatcher that relays only the values matching [filter].
Dispatcher<T> where(Filter<T> filter) => new WhereDispatcher<T>(this, filter);
/// Returns a dispatcher that relays values until [filter] first matches,
/// at which point it detaches itself from this dispatcher.
///
/// Note that the value that matches [filter] is not itself relayed.
Dispatcher<T> until(Filter<T> filter) {
var subdispatcher = new Dispatcher<T>();
Handler handler;
handler = (T data) {
if (filter(data))
unlisten(handler);
else
subdispatcher._add(data);
};
listen(handler);
return subdispatcher;
}
/// Completes with the first dispatched value that matches [filter], then
/// removes its internal listener.
Future<T> firstWhere(Filter<T> filter) {
Completer completer = new Completer();
Handler handler;
handler = (T data) {
if (filter(data)) {
completer.complete(data);
unlisten(handler);
}
};
listen(handler);
return completer.future;
}
}
/// A [Dispatcher] that relays values from a [parent] dispatcher only when
/// they satisfy [filter].
///
/// It attaches [_handler] to the parent lazily — when it gains its first
/// listener — and detaches again when its last listener is removed, so an
/// unused filtered dispatcher costs the parent nothing.
class WhereDispatcher<T> extends Dispatcher {
WhereDispatcher(this.parent, this.filter) : super();
/// The upstream dispatcher this one filters.
Dispatcher parent;
/// The predicate a value must satisfy to be relayed.
Filter filter;
void listen(Handler<T> handler) {
// First listener: start receiving from the parent.
if (_listeners == null || _listeners.length == 0)
parent.listen(_handler);
super.listen(handler);
}
bool unlisten(Handler<T> handler) {
var result = super.unlisten(handler);
// Last listener removed: stop receiving from the parent.
if (result && _listeners.length == 0)
parent.unlisten(_handler);
return result;
}
// Relays a parent value downstream iff it passes the filter.
void _handler(T data) {
if (filter(data))
_add(data);
}
}
/// Base class for everything dispatched through [EventTarget.dispatchEvent].
///
/// [ReturnType] is the type listeners may store in [result]; the final value
/// of [result] becomes the return value of dispatchEvent.
abstract class Event<ReturnType> {
Event() { init(); }
/// Hook for subclasses to set up per-instance state (e.g. a default
/// [result]) without having to redeclare the constructor.
void init() { }
/// Whether this event propagates up the dispatch chain or fires only at
/// its target.
bool get bubbles;
EventTarget _target;
/// The node the event was originally dispatched at.
EventTarget get target => _target;
EventTarget _currentTarget;
/// The node whose listeners are currently being invoked.
EventTarget get currentTarget => _currentTarget;
bool handled; // precise semantics depend on the event type, but in general, set this when you set result
ReturnType result;
/// Whether [candidate] could legally be assigned to [result]; used by
/// dispatchEvent to validate its defaultResult argument.
bool resultIsCompatible(dynamic candidate) => candidate is ReturnType;
// TODO(ianh): abstract API for doing things at shadow tree boundaries
// TODO(ianh): do events get blocked at scope boundaries, e.g. focus events when both sides are in the scope?
// TODO(ianh): do events get retargetted, e.g. focus when leaving a custom element?
// e.g. sent from inside a shadow tree, when exiting the shadow tree, focus event should:
// - disappear if we're moving from one to another element
// - be targetted if it's going to another node in a different scope
}
/// A node that can receive [Event]s and participate in event dispatch.
class EventTarget {
  EventTarget() : _eventsController = new DispatcherController<Event>();

  /// The dispatcher on which this target's events can be observed.
  Dispatcher get events => _eventsController.dispatcher;

  /// The next target up the dispatch chain; null at the root.
  EventTarget get parentNode;

  /// Returns the chain of targets from this node up to the root, starting
  /// with this node.
  ///
  /// Fix: the original recursed and called List.insert(0, ...) at every
  /// level, which is O(N^2) in the depth of the chain; this builds the
  /// identical list iteratively in O(N).
  List<EventTarget> getEventDispatchChain() {
    var chain = <EventTarget>[];
    EventTarget node = this;
    while (node != null) {
      chain.add(node);
      node = node.parentNode;
    }
    return chain;
  }

  final DispatcherController _eventsController;

  /// Dispatches [event] at this target (and, if [Event.bubbles] is true,
  /// at every ancestor in turn), returning the event's final result.
  ///
  /// O(N*M) where N is the length of the chain and M is the average number
  /// of listeners per link in the chain.
  ///
  /// Note: this will throw an ExceptionListException<ExceptionListException>
  /// if any of the listeners threw.
  dynamic dispatchEvent(Event event, { dynamic defaultResult: null }) {
    assert(event != null); // event must be non-null
    event.handled = false;
    // defaultResult must be assignable to the event's result type.
    assert(event.resultIsCompatible(defaultResult));
    event.result = defaultResult;
    event._target = this;
    var chain;
    if (event.bubbles)
      chain = this.getEventDispatchChain();
    else
      chain = [this];
    // Collect per-link failures so one throwing link doesn't prevent the
    // rest of the chain from seeing the event.
    var exceptions = new ExceptionListException<ExceptionListException>();
    for (var link in chain) {
      try {
        link._dispatchEventLocally(event);
      } on ExceptionListException catch (e) {
        exceptions.add(e);
      }
    }
    if (exceptions.length > 0)
      throw exceptions;
    return event.result;
  }

  // Delivers the event to this target's own listeners, updating
  // currentTarget as the event walks the chain.
  void _dispatchEventLocally(Event event) {
    event._currentTarget = this;
    _eventsController.add(event);
  }
}
```
Fonts
=====
Mojo has a font service, which takes a URL and hands back glyphs.
Sky has an API that takes a URL, a name, and hands back a promise
which, when resolved, indicates that Sky has now associated that font
name with the relevant glyphs (by calling the Mojo service).
The text part of Sky's drawing API accepts a list of font names, and
uses those to draw the relevant glyphs, falling back through the
provided fonts, then all the loaded fonts, until a glyph is found.
By default, three fonts are loaded, with the names 'serif',
'sans-serif', and 'monospace'. They have good Unicode coverage.
TODO(ianh): Actually define these APIs and so on.
Gestures
========
Flutter's Gesture API has the following key components:
* Pointers
* Gesture Recognisers
* Gesture Arenas
Pointers represent contact points on the display surface, also referred to as "touches". Pointers fire events describing when they are down, moved, up, or canceled.
Gesture recognisers examine sequences of pointer events and map them to higher-level descriptions like "tap", "drag", and so forth.
Gesture arenas disambiguate gestures when multiple recognisers are in contention.
Pointers
--------
TODO(ianh): elaborate
Gesture Arenas
--------------
TODO(ianh): elaborate
Gesture Recognisers
-------------------
TODO(ianh): elaborate
Sample Scenarios
----------------
TODO(ianh): elaborate
Limitations
-----------
Flutter does not currently support the following features:
* Mice, trackballs, trackpads, joysticks, "mouse keys", and other input mechanisms that map to persistent pointers.
* Joysticks, trackballs, tabbing, and other mechanisms that map to directional or sequential focus navigation.
* Hover touch effects, where touches are detected before being "down".
Keyboard Events
===============
Scope
-----
The Sky keyboard API is intended to handle the following:
- reporting raw key down/up events from physical keyboards ("Alt"
down, "E" down, "E" up, "Alt" up)
- reporting simulated raw key down/up events from virtual keyboards,
if the keyboard provides them
- IME
- reporting input text events from physical and virtual keyboards
("é", autorepeat)
- inline editing of typed word
- backspace
- autocorrect
- editing around app-provided chips
- adjusting editor UI (line height, word spacing, etc)
- replacing selection
- providing per-phrase alternative interpretations
- composing letters
- composing words
API
---
TODO(ianh): Write API.
Linkability
===========
This file has ideas only, so far. Comments welcome.
Scenarios
---------
* sudoku app:
* want to share the board layout, but not my progress so far
* want to transfer my progress to another device
* a wikipedia-like app:
* follow links to other topics
* have links from other apps (e.g. search) to a specific subsection of a topic
* an instant-messenger app:
* bookmark specific group conversations
* a social network app:
* link to specific posts
* link to social network constructs like user lists (twitter), communities (g+), walls (fb), users…
* an IDE
* want to save a particular state of open windows, maybe to share with other developers
* Navigation app
* want to link to different modes of the app: nav mode, search page, personal profile, settings, etc
* want to link to specific points of interest, either public (restaurant) or private (home, work, saved locations)
* want to link to a specific map location, zoom level, direction, angle, time of day (for shadows), route (for navigation)
* want to link to a destination in nav mode (without a route)
* Podcast app
* want to link to a specific view (e.g. in doggcatcher, feeds, audio, video, news...)
* want to link to a specific podcast (maybe independently of the app)
* want to link to a specific time in a specific episode of a specific podcast
* save ui state (e.g. size of ui area vs podcast list in doggcatcher, scroll position in a list, specific settings window being on top of specific tab at a specific scroll position, etc)
* News app
* categories
* articles
* sets of categories
* sets of categories + a selected category + a scroll position
* app sections (e.g. Newsstand’s Explore vs Read Now vs My Library)
* specific settings in the settings section of the app
* Code Review Tools
* a specific code review
* a specific file in a specific code review
* code review plus scroll position
* specific comment
* the state of the UI, such as which changes are visible, which comments are expanded, sort settings, filter settings, etc; whether the settings window is open, what tab it’s open to, what field is focused…
UI
--
* Sharing current state to another device using NFC: just put the phones together, the active app(s?) serialise their state to a “URL” and that is sent to the other device
* App exposes a “permalink” or “get link” UI that exposes a string you can Share (a la Android’s Share intent) or copy and paste.
* An accessibility tree should expose the URL of each part of the app so that a user with an accessibility tool can bookmark a particular location in the app to jump to it later.
Thoughts
--------
* Seems like you link to three kinds of things:
* different in-app concepts, which might be shared across apps
* specific posts in a social network
* users
* particular game board starting configurations, game levels
* wikipedia topics
* search results
* POIs in a map
* videos on Vimeo, YouTube, etc
* a code review / CL / pull request
* a comment on a code review
* a file in a code review
* a comment in a blog post
* telephone numbers
* lat/long coordinates
* podcasts
* different top-level parts of the app (shallow state)
* e.g. in Facebook, linking to the stream; in G+, linking to the communities landing page, etc
* in a maps app, the mode (satellite, navigation, etc)
* deep state
* the current state of a particular game board, e.g. all the piece positions in chess, all the current choices in sudoku...
* what windows are open, what field is focused, what widgets are expanded, the precise view of a 3D map, etc
* subsection of a topic in wikipedia (scroll position)
* Since almost every app is going to have app-specific items, we need to make the item space trivially extensible (no registry, no fixed vocabulary). This means that common items (e.g. lat/long coordinates, podcasts) will probably evolve conventions organically within communities rather than in a centralised fashion
* We don’t have to use URLs as they are known today, but doing so would leverage the existing infrastructure which might be valuable
Ideas
-----
* Two kinds of URLs: application state, and “things”.
* Application state URLs consist of an identifier for the app, plus a blob of data for how to open the app
* Thing URLs identify a thing, either by string name, opaque identifier, or more structured data (e.g. two comma-separated floating point numbers for lat/long).
* Thing URLs have a label saying what they are, e.g. “poi” or “geo” or “cl-comment” or something.
* Maybe “apps” are just things, and going to an app is like picking that app thing from the system app, the same way you’d pick a post from a social network app.
Mojom IDL
=========
The Mojom IDL language is used to describe the APIs exposed over Mojo
pipes.
Mojom IDL definitions are typically compiled to wrappers in each
language, which are then used as imports.
TODO(ianh): Define in detail how this actually works
Pointer Events
==============
Scope
-----
The following input devices are supported by sky:
- fingers on multitouch screens
- mice, including mouse wheels
- styluses on screens
- other devices that emulate mice (track pads, track balls)
- [keyboards and IMEs](keyboard.md)
The following input devices are not supported natively by sky, but can
be used by connecting directly to the mojo application servicing the
relevant device:
- joysticks
- track balls that move focus (or raw data from track balls)
- raw data from track pads (e.g. multitouch gestures)
- raw data from styluses that have their own absolute pads
- raw data from mice (e.g. to handle mouse capture in 3D games)
The following interactions are intended to be easy to handle:
- one finger starts panning, another finger is placed on the surface
(and ignored), the first finger is lifted, and the second finger
continues panning (without the scroll position jumping when the
first finger is lifted)
- right-clicking doesn't trigger buttons by default
- fingers after the first within a surface don't trigger buttons by
default
- if there are two independent surfaces, they capture fingers
unrelated to each other
Frameworks are responsible for converting pointer events described
below into widget-specific events such as the following:
- a click/tap/activation, as distinct from a short drag
- a context menu request (e.g. right-click, long-press)
- a drag (moving an item)
- a pan (scroll)
- a zoom/rotation (whether using two finger gestures, or one finger
with the double-tap-and-hold gesture)
- a double-tap autozoom
In particular, this means distinguishing whether a finger tap consists
of a tap, a drag, or a long-press; it also means distinguishing
whether a drag, once established as such, should be treated as a pan
or a drag, and deciding whether a secondary touch should begin a
zoom/rotation or not.
This is done using the [gesture recogniser API](gestures.md)
Pointers
--------
Each touch or pointer is tracked individually.
New touches and pointers can appear and disappear over time.
Each pointer has a list of current targets.
When a new one enters the system, a non-bubbling ``PointerAddedEvent``
event is fired at the application's element tree's root node, and the
pointer's current targets list is initialised to just that Root
object.
When it is removed, a non-bubbling ``PointerRemovedEvent`` event is
fired at the application's element tree's root node and at any other
objects in the pointer's current targets list. Currently, at the time
of a ``PointerRemoved``, the list will always consist of only the
application's element tree's root node.
A pointer can be "up" or "down". Initially all pointers are "up".
A pointer switches from "up" to "down" when it is a touch or stylus
that is in contact with the display surface, or when it is a mouse
that is being clicked, and from "down" back to "up" when this ends.
(Note that clicking a button on a stylus doesn't change it from up to
down. A stylus can have a button pressed while "up".) In the case of a
mouse with multiple buttons, the pointer switches back to "up" only
when all the buttons have been released.
When a pointer switches from "up" to "down", the following algorithm
is run:
1. Hit test the position of the pointer, let 'node' be the result.
2. Fire a bubbling ``PointerDownEvent`` event at the layoutManager
for 'node', with an empty array as the default return value. Let
'result1' be the returned value.
3. If result1 is not an array of EventTarget objects, set it to the
empty array and (if this is debug mode) report the issue.
4. Fire a bubbling ``PointerDownEvent`` event at the Element for
'node', with an empty array as the default return value. Let
'result2' be the returned value.
5. If result2 is not an array of EventTarget objects, set it to the
empty array and (if this is debug mode) report the issue.
6. Let result be the concatenation of result1's contents, result2's
contents, and the application's element tree's root node.
7. Let 'result' be this pointer's current targets.
When an object is one of the current targets of a pointer and no other
pointers have that object as a current target so far, and either there
are no buttons (touch, stylus) or only the primary button is active
(mouse) and this is not an inverted stylus, then that pointer is
considered the "primary" pointer for that object. The pointer remains
the primary pointer for that object until the corresponding
``PointerUpEvent`` event (even if the buttons change).
When a pointer moves, a non-bubbling ``PointerMovedEvent`` event is
fired at each of the pointer's current targets in turn (maintaining
the order they had in the ``PointerDownEvent`` event, if there's more
than one). If the return value of a ``PointerMovedEvent`` event is
'cancel', and the pointer is currently down, then the pointer is
canceled (see below).
When a pointer's button state changes but this doesn't impact whether
it is "up" or "down", e.g. when a mouse with a button down gets a
second button down, or when a stylus' buttons change state, but the
pointer doesn't simultaneously move, then a ``PointerMovedEvent``
event is fired anyway, as described above, but with dx=dy=0.
When a pointer switches from "down" to "up", a non-bubbling
``PointerUpEvent`` event is fired at each of the pointer's current
targets in turn (maintaining the order they had in the
``PointerDownEvent`` event, if there's more than one), and then the
pointer's current target list is emptied except for the application's
element tree's root node. The buttons exposed on the
``PointerUpEvent`` event are those that were down immediately prior to
the buttons being released.
At the time of a ``PointerUpEvent`` event, for each object that is a
current target of the pointer, and for which the pointer is considered
the "primary" pointer for that object, if there is another pointer
that is already down, which is of the same kind, which also has that
object as a current target, and that has either no buttons or only its
primary button active, then that pointer becomes the new "primary"
pointer for that object before the ``PointerUpEvent`` event is sent.
Otherwise, the "primary" pointer stops being "primary" just _after_
the ``PointerUpEvent`` event. (This matters for whether the 'primary'
field is set.)
When a pointer is canceled, if it is "down", pretend that the pointer
moved to "up", sending ``PointerUpEvent`` as described above, and
entirely empty its current targets list. After the pointer actually
switches from "down" to "up", replace the current targets list with an
object that only contains the application's element tree's root node.
Nothing special happens when a pointer's current target moves in the
DOM.
The x and y position of an -up or -down event always match those of
the previous -moved or -added event, so their dx and dy are always 0.
Positions are floating point numbers; they can have subpixel values.
For each pointer, only a single ``PointerAddedEvent`` or
``PointerRemovedEvent`` event is fired per frame. If a pointer would
have been added and removed in the same frame, the pointer is ignored,
and no events are fired for that pointer.
For each pointer, only a single ``PointerDownEvent`` or
``PointerUpEvent`` event is fired per frame, representing the change
in state from the last frame, if any. Exactly when the event is fired
is up to the implementation and may depend on the hardware.
For each pointer, at most two ``PointerMoveEvent`` events are fired
per frame, one before the ``PointerDownEvent`` or ``PointerUpEvent``
event, if any, and one after. If the pointer didn't change "down"
state, then only one ``PointerMoveEvent`` event is fired. All the
actual moves that the pointer experienced are coalesced into the
event.
Example:
If a mouse experiences the following events:
- move +1, down, move +2, up, move +4, down, move +8
...the events might be:
- move +7, down, move +8
...or:
- move +1, down, move +14
TODO(ianh): expose the unfiltered uncoalesced stream of events for
programs that want more precision (e.g. drawing apps)
These events have the following fields (see below for the class
definitions):
pointer: an integer assigned to this touch or pointer when it
enters the system, never reused, increasing
monotonically every time a new value is assigned,
starting from 1 (if the system gets a new tap every
microsecond, this will cause a problem after 285
years)
kind: one of 'touch', 'mouse', 'stylus', 'inverted-stylus'
x: x-position relative to the top-left corner of the
surface of the node on which the event was fired
y: y-position relative to the top-left corner of the
surface of the node on which the event was fired
dx: difference in x-position since last
``PointerMovedEvent`` event
dy: difference in y-position since last
``PointerMovedEvent`` event
buttons: a bitfield of the buttons pressed, from the following
list:
1: primary mouse button (not available on stylus)
2: secondary mouse button, primary stylus button
3: middle mouse button, secondary stylus button
4: back button
5: forward button
additional buttons can be represented by numbers
greater than six:
n: (n-2)th mouse button, ignoring any buttons that
are explicitly back or forward buttons
(n-4)th stylus button, again ignoring any
explicitly back or forward buttons
note that stylus buttons can be pressed even when the
pointer is not "down"
e.g. if the left mouse button and the right mouse
button are pressed at the same time, the value will
be 3 (bits 1 and 2); if the right mouse button and
the back button are pressed at the same time, the
value will be 10 (bits 2 and 4)
down: true if the pointer is down (in ``PointerDownEvent``
event or subsequent ``PointerMoveEvent`` events);
false otherwise (in ``PointerAdded``, ``PointerUp``,
and ``PointerRemovedEvent`` events, and in
``PointerMoveEvent`` events that aren't between
``PointerDownEvent`` and ``PointerUpEvent`` events)
primary: true if this is a primary pointer/touch (see above)
can only be set for ``PointerMovedEvent`` and
``PointerUpEvent``
obscured: true if the system was rendering another view on top
of the sky application at the time of the event (this
is intended to enable click-jacking protections)
When down is true:
pressure: the pressure of the touch as a number ranging from
0.0, indicating a touch with no discernible pressure,
to 1.0, indicating a touch with "normal" pressure,
and possibly beyond, indicating a stronger touch; for
devices that do not detect pressure (e.g. mice),
returns 1.0
pressure-min: the minimum value that pressure can return for this
pointer
pressure-max: the maximum value that pressure can return for this
pointer
When kind is 'touch', 'stylus', or 'stylus-inverted':
distance: distance of detected object from surface (e.g.
distance of stylus or finger from screen), if
supported and down is not true, otherwise 0.0.
distance-min: the minimum value that distance can return for this
pointer (always 0.0)
distance-max: the maximum value that distance can return for this
pointer (0.0 if not supported)
When kind is 'touch', 'stylus', or 'stylus-inverted' and down is true:
radius-major: the radius of the contact ellipse along the major
axis, in pixels
radius-minor: the radius of the contact ellipse along the minor
axis, in pixels
radius-min: the minimum value that could be reported for
radius-major or radius-minor for this pointer
radius-max: the maximum value that could be reported for
radius-major or radius-minor for this pointer
When kind is 'touch' and down is true:
orientation: the angle of the contact ellipse, in radians in the
range
-pi/2 < orientation <= pi/2
...giving the angle of the major axis of the ellipse
with the y-axis (negative angles indicating an
orientation along the top-left / bottom-right
diagonal, positive angles indicating an orientation
along the top-right / bottom-left diagonal, and zero
indicating an orientation parallel with the y-axis)
When kind is 'stylus' or 'stylus-inverted':
orientation: the angle of the stylus, in radians in the range
-pi < orientation <= pi
...giving the angle of the axis of the stylus
projected onto the screen, relative to the positive
y-axis of the screen (thus 0 indicates the stylus, if
projected onto the screen, would go from the contact
point vertically up in the positive y-axis direction,
pi would indicate that the stylus would go down in
the negative y-axis direction; pi/4 would indicate
that the stylus goes up and to the right, -pi/2 would
indicate that the stylus goes to the left, etc)
tilt: the angle of the stylus, in radians in the range
0 <= tilt <= pi/2
...giving the angle of the axis of the stylus,
relative to the axis perpendicular to the screen
(thus 0 indicates the stylus is orthogonal to the
plane of the screen, while pi/2 indicates that the
stylus is flat on the screen)
TODO(ianh): add an API that exposes the currently existing pointers,
so that you can determine e.g. if you have a mouse.
Here are the class definitions for pointer events:
```dart
/// The kind of input device a pointer event originated from.
enum PointerKind { touch, mouse, stylus, invertedStylus }
/// Base class for all pointer events, carrying the full state of the pointer
/// at the time of the event.
///
/// [T] is the result type listeners may set; the concrete subclasses below
/// determine which events bubble and which carry results. Field semantics
/// are described in detail in the prose spec above.
abstract class PointerEvent<T> extends Event<T> {
PointerEvent({ this.pointer,
this.kind,
this.x, this.y,
this.dx: 0.0, this.dy: 0.0,
this.buttons: 0,
this.down: false,
this.primary: false,
this.obscured: false,
this.pressure, this.minPressure, this.maxPressure,
this.distance, this.minDistance, this.maxDistance,
this.radiusMajor, this.radiusMinor, this.minRadius, this.maxRadius,
this.orientation, this.tilt
}) : super();
/// Monotonically increasing id assigned when the pointer entered the
/// system; never reused.
final int pointer;
final PointerKind kind;
final double x; // logical pixels
final double y; // logical pixels
final double dx; // logical pixels
final double dy; // logical pixels
final int buttons; // bit field
static const int primaryMouseButton = 0x01;
static const int secondaryMouseButton = 0x02;
static const int primaryStylusButton = 0x02;
static const int middleMouseButton = 0x04;
static const int secondaryStylusButton = 0x04;
static const int backButton = 0x08;
static const int forwardButton = 0x10;
final bool down;
final bool primary;
final bool obscured;
// if down != true, these are all null
final double pressure; // normalised, 0.0 means none, 1.0 means "normal"
final double minPressure; // 0 <= minPressure <= 1.0
final double maxPressure; // maxPressure >= 1.0
// if kind != touch, stylus, or invertedStylus, these are all null
final double distance; // logical pixels
final double minDistance; // logical pixels
final double maxDistance; // logical pixels
// if down != true or kind != touch, stylus, or invertedStylus, these are all null
final double radiusMajor; // logical pixels
final double radiusMinor; // logical pixels
final double minRadius; // logical pixels
final double maxRadius; // logical pixels
// if down != true or kind != touch, stylus, or invertedStylus, this is null
final double orientation; // radians // meaning is different for touch and stylus/invertedStylus
// if kind != stylus or invertedStylus, this is null
final double tilt; // radians
}
// the following uses proposed syntax from
// https://code.google.com/p/dart/issues/detail?id=22274
// to avoid duplicating that entire constructor up there
/// Fired (non-bubbling) at the root when a new pointer enters the system.
class PointerAddedEvent extends PointerEvent<Null> {
PointerAddedEvent = PointerEvent;
bool get bubbles => false;
}
/// Fired (non-bubbling) at the root and at any remaining current targets
/// when a pointer leaves the system.
class PointerRemovedEvent extends PointerEvent<Null> {
PointerRemovedEvent = PointerEvent;
bool get bubbles => false;
}
/// Fired (bubbling) when a pointer goes "down"; listeners append the
/// EventTargets that should become the pointer's current targets to
/// [result], which [init] seeds with an empty list.
class PointerDownEvent extends PointerEvent<List<EventTarget>> {
@override void init() { result = new List<EventTarget>(); }
PointerDownEvent = PointerEvent;
bool get bubbles => true;
}
/// Fired (non-bubbling) at each current target when a pointer goes "up".
class PointerUpEvent extends PointerEvent<Null> {
PointerUpEvent = PointerEvent;
bool get bubbles => false;
}
/// Fired (non-bubbling) at each current target when a pointer moves, or
/// when its button state changes without changing its up/down state.
class PointerMovedEvent extends PointerEvent<Null> {
PointerMovedEvent = PointerEvent;
bool get bubbles => false;
}
/*
```
Wheel events
------------
When a wheel input device is turned, a ``WheelEvent`` event that
doesn't bubble is fired at the application's element tree's root node,
with the following fields:
wheel: an integer assigned to this wheel by the system. The
same wheel on the same system must always be given
the same ID. The primary wheel (e.g. the vertical
wheel on a mouse) must be given ID 1.
delta: a floating point number representing the fraction of
the wheel that was turned, with positive numbers
representing a downward movement on vertical wheels,
rightward movement on horizontal wheels, and a
clockwise movement on wheels with a user-facing side.
Additionally, if the wheel is associated with a pointer (e.g. a mouse
wheel), the following fields must be present also:
pointer: the integer assigned to the pointer in its
``PointerAddedEvent`` event (see above).
x: x-position relative to the top-left corner of the
display, in global layout coordinates
y: y-position relative to the top-left corner of the
display, in global layout coordinates
Note: The only wheels that are supported are mouse wheels and physical
dials. Track balls are not reported as mouse wheels.
```dart
*/
/// Fired (non-bubbling) at the application's root when a wheel input device
/// (a mouse wheel or a physical dial) is turned.
class WheelEvent extends Event {
WheelEvent({ this.wheel,
this.delta: 0.0,
this.pointer,
this.x, this.y
}) : super();
/// System-assigned identifier for the wheel; the primary wheel is 1.
final int wheel;
final double delta; // revolutions (or fractions thereof)
// The following are only meaningful when the wheel is associated with a
// pointer (e.g. a mouse wheel); see the prose spec above.
final int pointer;
final double x; // logical pixels
final double y; // logical pixels
bool get bubbles => false;
}
```
Scheduler API
=============
```dart
/// Signature for a callback that takes no arguments and returns nothing.
typedef void TimerCallback();
/// Immutable flags describing which scheduler phases a [Task] is allowed
/// to run during.
class TaskSettings {
const TaskSettings({
this.idle: false, // tasks that should run during the idle phase
this.layout: false, // tasks that should run during the layout phase
this.paint: false, // tasks that should run during the paint phase
this.touch: false, // tasks that should run while a pointer is down
});
final bool idle;
final bool layout;
final bool paint;
final bool touch;
}
// Default settings: a task that runs only during the idle phase.
const idleTask = const TaskSettings(idle: true);
// Sentinel meaning "no delay" (run the task now).
const t0 = null;
// Default per-task time budget of one millisecond.
// Fix: Duration's named parameters are ints; `milliseconds: 1.0` passes a
// double and does not compile in Dart.
const t1ms = const Duration(milliseconds: 1);
// Priorities
// (these are intentionally not constants, so you can tweak them at runtime)
int IdlePriority = 0; // tasks that can be delayed arbitrarily
int FutureLayoutPriority = 1000; // tasks that prepare layout
int TimerAnimationPriority = 3000; // tasks related to animations
int InputPriority = 4000; // input events
int InputAnimationPriority = 5000; // framework-fired events for scrolling
/// A unit of work scheduled with the Sky scheduler.
///
/// All members are `external`: the implementation is supplied by the
/// embedder, not by this spec.
class Task {
external Task(callback, {
Duration delay: t0, // how long to wait before scheduling this task; null means run it now (same as duration 0)
Duration budget: t1ms, // how long to allow the task to run before firing an exception; null means no timeout
TaskSettings settings: idleTask, // what phases to allow the task to run during
int priority: 0, // the greater the number, the more likely it is to run
bool defer: false // punts this task until the next loop (after we're done with paint)
});
external void cancel(); // prevents the task from running, if it hasn't run yet
external bool get active; // true until fired or until canceled
}
// The Dart native mechanisms for scheduling tasks, as listed below,
// get configured as follows:
//
// delay: duration argument for the Timer constructors, otherwise null (0)
// budget: 1ms
// settings: same as for the task that triggered this task
// priority: same as for the task that triggered this task
// defer: false
//
// method: scheduleMicrotask(Function void callback())
// constructor: Future.microtask(...) // calls scheduleMicrotask() to do the work
// constructor: Timer (Duration duration, Function void callback())
// constructor: Timer.periodic(Duration duration, Function void callback(Timer timer))
```
Flutter Style Guide
===================
See [https://flutter.io/style-guide/].
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册