// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Copied with modifications from //ash/accessibility, refactored for use in
// chromecast.

#ifndef CHROMECAST_BROWSER_ACCESSIBILITY_TOUCH_EXPLORATION_CONTROLLER_H_
#define CHROMECAST_BROWSER_ACCESSIBILITY_TOUCH_EXPLORATION_CONTROLLER_H_

#include <map>
#include <memory>
#include <vector>

#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/timer/timer.h"
#include "base/values.h"
#include "chromecast/browser/accessibility/accessibility_sound_player.h"
#include "ui/accessibility/ax_enums.mojom.h"
#include "ui/events/event.h"
#include "ui/events/event_rewriter.h"
#include "ui/events/gesture_detection/gesture_detector.h"
#include "ui/events/gestures/gesture_provider_aura.h"
#include "ui/gfx/geometry/point.h"

namespace aura {
class Window;
}

namespace ui {
class Event;
class GestureEvent;
class GestureProviderAura;
class TouchEvent;
}  // namespace ui

namespace chromecast {
namespace shell {

// A delegate to handle commands in response to detected accessibility gesture
// events.
class TouchExplorationControllerDelegate {
 public:
  virtual ~TouchExplorationControllerDelegate() {}

  // Called when the user performs an accessibility gesture while in touch
  // accessibility mode; the gesture should be forwarded to ChromeVox.
  virtual void HandleAccessibilityGesture(ax::mojom::Gesture gesture) = 0;

  // Called when the user performs a single tap that is not within the lift
  // activation bounds.
  virtual void HandleTap(const gfx::Point touch_location) = 0;
};
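
// For illustration, a minimal delegate might look like the sketch below
// (hypothetical class and logging choices; not part of this interface):
//
//   class LoggingDelegate : public TouchExplorationControllerDelegate {
//    public:
//     void HandleAccessibilityGesture(ax::mojom::Gesture gesture) override {
//       // Forwarding to ChromeVox would happen here.
//       LOG(INFO) << "Gesture: " << static_cast<int>(gesture);
//     }
//     void HandleTap(const gfx::Point touch_location) override {
//       LOG(INFO) << "Tap at: " << touch_location.ToString();
//     }
//   };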

// TouchExplorationController is used in tandem with "Spoken Feedback" to
// make the touch UI accessible. Gestures performed in the middle of the
// screen are mapped to accessibility key shortcuts, while gestures performed
// on the edge of the screen can change settings.
//
// ** Short version **
//
// At a high level, single-finger events are used for accessibility -
// exploring the screen gets turned into mouse moves (which can then be
// spoken by a running accessibility service), a single tap while the user
// is in touch exploration or a double-tap simulates a click, and gestures
// can be used to send high-level accessibility commands. For example, a swipe
// right corresponds to the keyboard shortcut shift+search+right.
// Swipes with up to four fingers are also mapped to commands. Slide
// gestures performed on the edge of the screen can change settings
// continuously. For example, sliding a finger along the right side of the
// screen will change the volume. When a user double-taps and holds with one
// finger, the finger is passed through as if accessibility were turned off.
// If the user taps the screen with two fingers, the user can silence spoken
// feedback if it is playing.
//
// ** Long version **
//
// Here are the details of the implementation:
//
// When the first touch is pressed, a 300 ms grace period timer starts.
//
// If the user keeps their finger down for more than 300 ms and doesn't
// perform a supported accessibility gesture in that time (e.g. swipe right),
// they enter touch exploration mode, and all movements are translated into
// synthesized mouse move events.
//
// Also, if the user moves their single finger outside a certain slop region
// (without performing a gesture), they enter touch exploration mode earlier
// than 300 ms.
//
// If the user taps and releases their finger, a single mouse move is fired
// 300 ms after the initial touch.
//
// While in touch exploration mode, the user can perform a single tap by
// releasing their finger and tapping again before 300 ms passes. This
// results in a click on the last successful touch exploration location.
// This allows the user to perform a single tap anywhere to activate it.
// (See more information on simulated clicks below.)
//
// The user can perform swipe gestures in one of the four cardinal
// directions, which will be interpreted and used to control the UI. A
// gesture will only be registered if the fingers move outside the slop, and
// only if it is completed within the grace period. If a single-finger
// gesture fails to be completed within the grace period, the state changes
// to touch exploration mode. If a multi-finger gesture fails to be completed
// within the grace period, the user must lift all fingers before completing
// any more actions.
//
// The user's initial tap sets the anchor point. Simulated events are
// positioned relative to the anchor point, so that after exploring to find
// an object the user can double-tap anywhere on the screen to activate it.
// The anchor point is also set by ChromeVox every time it highlights an
// object on the screen. During touch exploration this ensures that
// any simulated events go to the center of the most recently highlighted
// object, rather than to the exact tap location (which could have drifted
// off of the object). This also ensures that when the current ChromeVox
// object changes due to a gesture or input focus changing, simulated
// events go to that object and not the last location touched by a finger.
//
// When the user double-taps, this is treated as a discrete gesture, and an
// event is sent to ChromeVox to activate the current object, whatever
// that is. However, when the user double-taps and holds, any event from that
// finger is passed through, allowing the user to drag. These events are
// passed through with a location that's relative to the anchor point.
//
// If any other fingers are added or removed during a passthrough, they are
// ignored. Once the passthrough finger is released, passthrough stops and
// the state is reset to the no fingers down state.
//
// If the user enters touch exploration mode, they can click without lifting
// their touch exploration finger by tapping anywhere else on the screen with
// a second finger, while the touch exploration finger is still pressed.
//
// Once touch exploration mode has been activated, it remains in that mode
// until all fingers have been released.
//
// If the user places a finger on the edge of the screen and moves their
// finger past slop, a slide gesture is performed. The user can then slide
// one finger along an edge of the screen and continuously control a setting.
// Once the user enters this state, the boundaries that define an edge expand
// so that the user can adjust the setting within a slightly bigger width
// along the screen. If the user exits this area without lifting their
// finger, they will not be able to perform any actions; however, if they
// keep their finger down and return to the "hot edge," they can still adjust
// the setting. In order to perform other touch accessibility movements, the
// user must lift their finger. If additional fingers are added while in this
// state, the user will transition to passthrough.
//
// Currently, only the right edge is mapped to control the volume. Volume
// control along the edge of the screen is directly proportional to where the
// user's finger is located on the screen. The top right corner of the screen
// automatically sets the volume to 100% and the bottom right corner of the
// screen automatically sets the volume to 0% once the user has moved past
// slop.
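//
// As an illustrative sketch (the exact mapping lives in the implementation),
// for a screen of height |height| in DIPs and a touch at vertical position
// |y| along the right edge, the resulting volume is approximately:
//
//   volume = 100 * (1 - y / height);  // clamped to [0, 100]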
//
// If the user taps the screen with two fingers and lifts both fingers before
// the grace period has passed, spoken feedback is silenced.
//
// The user can also enter passthrough by placing a finger on one of the
// bottom corners of the screen until an earcon sounds. After the earcon
// sounds, the user is in passthrough, so all subsequent fingers placed on
// the screen will be passed through. Once the finger in the corner has been
// released, the state will switch to wait for no fingers.
//
// The caller is expected to retain ownership of instances of this class and
// destroy them before |root_window| is destroyed.
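//
// Example usage (a minimal sketch; whether the controller registers itself
// as an event rewriter with |root_window|, or the caller must do so, is an
// implementation detail of the .cc file):
//
//   auto touch_exploration = std::make_unique<TouchExplorationController>(
//       root_window, delegate, sound_player);
//   ...
//   touch_exploration.reset();  // Destroy before |root_window| goes away.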
class TouchExplorationController : public ui::EventRewriter,
                                   public ui::GestureProviderAuraClient,
                                   public ui::GestureConsumer {
 public:
  TouchExplorationController(
      aura::Window* root_window,
      TouchExplorationControllerDelegate* delegate,
      AccessibilitySoundPlayer* accessibility_sound_player);
  ~TouchExplorationController() override;

  // Anchors synthesized touch events at this point. This is called when the
  // object with accessibility focus is updated via something other than
  // touch exploration.
  void SetTouchAccessibilityAnchorPoint(const gfx::Point& anchor_point);

  // Events within the exclude bounds will not be rewritten.
  // |bounds| are in root window coordinates.
  void SetExcludeBounds(const gfx::Rect& bounds);

  // Overridden from ui::EventRewriter
  ui::EventDispatchDetails RewriteEvent(
      const ui::Event& event,
      const Continuation continuation) override;

 private:
  friend class TouchExplorationControllerTestApi;

  // Event handlers based on the current state - see State, below.
  ui::EventDispatchDetails InNoFingersDown(const ui::TouchEvent& event,
                                           const Continuation continuation);
  ui::EventDispatchDetails InSingleTapPressed(const ui::TouchEvent& event,
                                              const Continuation continuation);
  ui::EventDispatchDetails InSingleTapOrTouchExploreReleased(
      const ui::TouchEvent& event,
      const Continuation continuation);
  ui::EventDispatchDetails InDoubleTapPending(const ui::TouchEvent& event,
                                              const Continuation continuation);
  ui::EventDispatchDetails InTouchReleasePending(
      const ui::TouchEvent& event,
      const Continuation continuation);
  ui::EventDispatchDetails InTouchExploration(const ui::TouchEvent& event,
                                              const Continuation continuation);
  ui::EventDispatchDetails InOneFingerPassthrough(
      const ui::TouchEvent& event,
      const Continuation continuation);
  ui::EventDispatchDetails InGestureInProgress(const ui::TouchEvent& event,
                                               const Continuation continuation);
  ui::EventDispatchDetails InTouchExploreSecondPress(
      const ui::TouchEvent& event,
      const Continuation continuation);
  ui::EventDispatchDetails InWaitForNoFingers(const ui::TouchEvent& event,
                                              const Continuation continuation);
  ui::EventDispatchDetails InTwoFingerTap(const ui::TouchEvent& event,
                                          const Continuation continuation);

  // Returns the current time of the tick clock.
  base::TimeTicks Now();

  // This timer is started every time we get the first press event, and
  // it fires after the double-click timeout elapses (300 ms by default).
  // If the user taps and releases within 300 ms and doesn't press again,
  // we treat that as a single mouse move (touch exploration) event.
  void StartTapTimer();
  void OnTapTimerFired();

  // Dispatch a new event outside of the event rewriting flow.
  void DispatchEvent(ui::Event* event, const Continuation continuation);

  // Overridden from GestureProviderAuraClient.
  //
  // The gesture provider keeps track of all the touch events after
  // the user moves fast enough to trigger a gesture. After the user
  // completes their gesture, this method will decide what keyboard
  // input their gesture corresponded to.
  void OnGestureEvent(ui::GestureConsumer* raw_input_consumer,
                      ui::GestureEvent* gesture) override;

  // Process the gesture events that have been created.
  void ProcessGestureEvents();

  void OnSwipeEvent(ui::GestureEvent* swipe_gesture);

  // Dispatches a single key with the given flags.
  void DispatchKeyWithFlags(const ui::KeyboardCode key,
                            int flags,
                            const Continuation continuation);

  // Binds DispatchKeyWithFlags to a specific key and flags.
  base::OnceClosure BindKeyEventWithFlags(const ui::KeyboardCode key,
                                          int flags,
                                          const Continuation continuation);

  std::unique_ptr<ui::MouseEvent> CreateMouseMoveEvent(
      const gfx::PointF& location,
      int flags);

  void EnterTouchToMouseMode();

  void PlaySoundForTimer();

  // Sends a simulated click.
  void SendSimulatedClick(const Continuation continuation);

  // Sends a simulated tap at anchor point.
  void SendSimulatedTap(const Continuation continuation);

  // Sends a simulated tap, if the anchor point falls within lift activation
  // bounds.
  void MaybeSendSimulatedTapInLiftActivationBounds(
      const ui::TouchEvent& event,
      const Continuation continuation);

  // Some constants used in touch_exploration_controller:

  // Within this many dips of the screen edge, the release event generated will
  // reset the state to NoFingersDown.
  const float kLeavingScreenEdge = 35;

  // Touch within this distance from a corner can invoke corner passthrough.
  const float kMaxDistanceFromEdge = 75;

  // After a slide gesture has been triggered, if the finger is still within
  // these bounds (in DIPs), the preset settings will still change.
  const float kSlopDistanceFromEdge = kMaxDistanceFromEdge + 40;

  // The split tap slop is a bit more generous since keeping two
  // fingers in place is a bit harder.
  float GetSplitTapTouchSlop();

  // Convert a gfx::PointF from DIP back to raw screen coordinates.
  gfx::PointF ConvertDIPToScreenInPixels(const gfx::PointF& location);

  enum State {
    // No fingers are down and no events are pending.
    NO_FINGERS_DOWN,

    // A single finger is down, but we're not yet sure if this is going
    // to be touch exploration or something else.
    SINGLE_TAP_PRESSED,

    // The user pressed and released a single finger - a tap - but we have
    // to wait until the end of the grace period to allow the user to tap a
    // second time. If the second tap doesn't occur within the grace period,
    // we dispatch a mouse move at the location of the first tap.
    SINGLE_TAP_RELEASED,

    // The user was in touch explore mode and released the finger.
    // If another touch press occurs within the grace period, a single
    // tap click occurs. This state differs from SINGLE_TAP_RELEASED
    // in that if a second tap doesn't occur within the grace period,
    // there is no mouse move dispatched.
    TOUCH_EXPLORE_RELEASED,

    // The user tapped once, and before the grace period expired, pressed
    // one finger down to begin a double-tap, but has not released it yet.
    // This could become passthrough, so no touch press is dispatched yet.
    DOUBLE_TAP_PENDING,

    // The user was doing touch exploration, started split tap, but lifted the
    // touch exploration finger. Once they remove all fingers, a touch release
    // will go through.
    TOUCH_RELEASE_PENDING,

    // We're in touch exploration mode. Anything other than the first finger
    // is ignored, and movements of the first finger are rewritten as mouse
    // move events. This mode is entered if a single finger is pressed and,
    // after the grace period, the user hasn't added a second finger or
    // moved the finger outside of the slop region. We'll stay in this
    // mode until all fingers are lifted.
    TOUCH_EXPLORATION,

    // If the user moves their finger faster than the threshold velocity after
    // a single tap, the touch events that follow will be translated into
    // gesture events. If the user successfully completes a gesture within the
    // grace period, the gesture will be interpreted and used to control the
    // UI via discrete actions - currently by synthesizing key events
    // corresponding to each gesture. Otherwise, the collected gestures are
    // discarded and the state changes to TOUCH_EXPLORATION.
    GESTURE_IN_PROGRESS,

    // The user was in touch exploration, but has placed down another finger.
    // If the user releases the second finger, a touch press and release
    // will go through at the last touch explore location. If the user
    // releases the touch explore finger, the touch press and release will
    // still go through once the split tap finger is also lifted. If any
    // fingers are pressed beyond the first two, the touch press is cancelled
    // and the user enters the wait state for the fingers to be removed.
    TOUCH_EXPLORE_SECOND_PRESS,

    // After the user double-taps and holds with a single finger, all events
    // for that finger are passed through, displaced by an offset. Adding
    // extra fingers has no effect. This state is left when the user removes
    // all fingers.
    ONE_FINGER_PASSTHROUGH,

    // If the user added another finger in SINGLE_TAP_PRESSED, or if the user
    // has multiple fingers down in any other state between
    // passthrough, touch exploration, and gestures, they must release
    // all fingers before completing any more actions. This state is
    // generally useful for developing new features, because it creates a
    // simple way to handle a dead end in user flow.
    WAIT_FOR_NO_FINGERS,

    // If the user taps the screen with two fingers and releases both fingers
    // before the grace period has passed, spoken feedback will be silenced.
    TWO_FINGER_TAP,
  };
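
  // Illustrative state walks for two common interactions (derived from the
  // comments above; the authoritative transitions live in the event
  // handlers):
  //
  //   Single tap to activate:
  //     NO_FINGERS_DOWN -> SINGLE_TAP_PRESSED -> SINGLE_TAP_RELEASED
  //       -> (tap timer fires, mouse move dispatched) -> NO_FINGERS_DOWN
  //
  //   Double-tap and hold to drag:
  //     NO_FINGERS_DOWN -> SINGLE_TAP_PRESSED -> SINGLE_TAP_RELEASED
  //       -> DOUBLE_TAP_PENDING -> ONE_FINGER_PASSTHROUGH
  //       -> (finger lifted) -> NO_FINGERS_DOWN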

  enum AnchorPointState {
    ANCHOR_POINT_NONE,
    ANCHOR_POINT_FROM_TOUCH_EXPLORATION,
    ANCHOR_POINT_EXPLICITLY_SET
  };

  enum ScreenLocation {
    // Hot "edges" of the screen are each represented by a respective bit.
    NO_EDGE = 0,
    RIGHT_EDGE = 1 << 0,
    TOP_EDGE = 1 << 1,
    LEFT_EDGE = 1 << 2,
    BOTTOM_EDGE = 1 << 3,
  };

  // Given a point, if it is within the given inset of an edge, returns the
  // edge. If it is within the given inset of two edges, returns an int with
  // both bits that represent the respective edges turned on. Otherwise
  // returns NO_EDGE.
  int FindEdgesWithinInset(gfx::Point point,
                           float horiz_inset,
                           float vert_inset);
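  //
  // Example (an illustrative caller; not code in this class): a touch near
  // the top-right corner yields (RIGHT_EDGE | TOP_EDGE), so an edge test
  // looks like:
  //
  //   if (FindEdgesWithinInset(location, inset, inset) & RIGHT_EDGE) {
  //     // Near the right edge; e.g. begin a volume slide gesture.
  //   }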

  // Sets the state and modifies any variables related to the state change
  // (e.g. resetting the gesture provider).
  void SetState(State new_state, const char* function_name);

  void VlogState(const char* function_name);

  void VlogEvent(const ui::TouchEvent& event, const char* function_name);

  // Returns the name of the given State value.
  const char* EnumStateToString(State state);

  aura::Window* root_window_;

  // Handles accessibility gestures. Not owned.
  TouchExplorationControllerDelegate* delegate_;

  // Handles earcons. Not owned.
  AccessibilitySoundPlayer* accessibility_sound_player_;

  // A set of touch ids for fingers currently touching the screen.
  std::vector<int> current_touch_ids_;

  // Map of touch ids to their last known location.
  std::map<int, gfx::PointF> touch_locations_;

  // The current state.
  State state_;

  // A copy of the event from the initial touch press.
  std::unique_ptr<ui::TouchEvent> initial_press_;
  Continuation initial_press_continuation_;

  // The timestamp of the most recent press event for the main touch id.
  // The difference between this and |initial_press_->time_stamp| is that
  // |most_recent_press_timestamp_| is reset in a double-tap.
  base::TimeTicks most_recent_press_timestamp_;

  // Map of touch ids to where each initial press occurred relative to the
  // screen.
  std::map<int, gfx::Point> initial_presses_;

  // In one finger passthrough, the touch is displaced relative to the
  // last touch exploration location.
  gfx::Vector2dF passthrough_offset_;

  // Stores the most recent event from a finger that is currently not
  // sending events through, but might in the future (e.g. before a finger
  // enters double-tap-hold passthrough, we need to update its location).
  std::unique_ptr<ui::TouchEvent> last_unused_finger_event_;
  Continuation last_unused_finger_continuation_;

  // The anchor point used as the location of a synthesized tap when the
  // user double-taps anywhere on the screen, and similarly the initial
  // point used when the user double-taps, holds, and drags. This can be
  // set either via touch exploration, or by a call to
  // SetTouchAccessibilityAnchorPoint when focus moves due to something other
  // than touch exploration.
  gfx::PointF anchor_point_dip_;

  // The current state of the anchor point.
  AnchorPointState anchor_point_state_;

  // The last touch exploration event.
  std::unique_ptr<ui::TouchEvent> last_touch_exploration_;

  // A timer that fires after the double-tap delay.
  base::OneShotTimer tap_timer_;

  // A timer that fires an indicator sound while sliding to change the volume.
  base::RepeatingTimer sound_timer_;

  // A default gesture detector config, so we can share the same
  // timeout and pixel slop constants.
  ui::GestureDetector::Config gesture_detector_config_;

  // Gesture handler to interpret the touch events.
  std::unique_ptr<ui::GestureProviderAura> gesture_provider_;

  // The previous state entered.
  State prev_state_;

  // A copy of the previous event passed.
  std::unique_ptr<ui::TouchEvent> prev_event_;

  // Whether DVLOGs are enabled.
  bool DVLOG_on_;

  // LocatedEvents within this area should be left alone.
  gfx::Rect exclude_bounds_;

  // Any touch exploration that both starts and ends (touch pressed, and
  // released) within this rectangle triggers a simulated single finger tap
  // at the anchor point on release.
  gfx::Rect lift_activation_bounds_;

  // Whether or not we've seen a touch press event yet.
  bool seen_press_ = false;

  // The maximum touch points seen in the current gesture.
  size_t max_gesture_touch_points_ = 0;

  // The horizontal and vertical insets for side gesture detection.
  const int gesture_start_width_;
  const int gesture_start_height_;

  // Whether or not to trigger pass-through mode when touch events come
  // in from the edges.
  bool side_gesture_pass_through_;

  DISALLOW_COPY_AND_ASSIGN(TouchExplorationController);
};

}  // namespace shell
}  // namespace chromecast

#endif  // CHROMECAST_BROWSER_ACCESSIBILITY_TOUCH_EXPLORATION_CONTROLLER_H_