compositing: Fully implement pinch zoom (#40083)

This change adds a full implementation of pinch zoom, including
center-aware zooming. Before, this kind of pinch zooming was only enabled
on OpenHarmony. Now all pinch zooms must come with a focal point, which
determines the center point of the zoom. This enables full pinch zoom on
Android and has OpenHarmony use the same system for pinch zoom.

Every WebView now has a `PinchZoom` which describes the viewport of the
pinch zoom and handles panning with proper chaining of zoom viewport
panning to scroll layer scrolling.  In addition, the collection of touch
actions is simplified by storing an array and turning each into a
ScrollZoomEvent when appropriate.

Caveats:
-  We've noticed some hard to diagnose bugs with clamping the panning
  viewport, but we'll tackle those later once we figure out how to
  reliably reproduce them.
- Keyboard scroll events currently do not properly pan the pinch zoom
   viewport. This will be handled in a followup.

Testing: There are currently no tests for this kind of touch interaction
as there's no way to read the pinch zoom from a WebView. It's processed
asynchronously. Once that API is added, we should be able to add some
simple tests, but many things are still inaccessible, such as the pan
position in the pinch zoom viewport.
Fixes #4224.

Signed-off-by: Martin Robinson <mrobinson@igalia.com>
Co-authored-by: Rakhi Sharma <atbrakhi@igalia.com>
This commit is contained in:
Martin Robinson
2025-10-23 15:52:27 +02:00
committed by GitHub
parent 183f141a0e
commit 37ae099695
12 changed files with 312 additions and 223 deletions

View File

@@ -29,7 +29,7 @@ use embedder_traits::{
CompositorHitTestResult, InputEventAndId, InputEventId, InputEventResult,
ScreenshotCaptureError, ShutdownState, ViewportDetails,
};
use euclid::{Point2D, Scale, Size2D, Transform3D};
use euclid::{Point2D, Scale, Size2D};
use gleam::gl::RENDERER;
use image::RgbaImage;
use ipc_channel::ipc::{self, IpcSharedMemory};
@@ -48,7 +48,7 @@ use webrender::{
};
use webrender_api::units::{
DeviceIntPoint, DeviceIntRect, DevicePixel, DevicePoint, DeviceRect, LayoutPoint, LayoutRect,
LayoutSize, WorldPoint,
LayoutSize, LayoutTransform, WorldPoint,
};
use webrender_api::{
self, BuiltDisplayList, ColorF, DirtyRect, DisplayListPayload, DocumentId,
@@ -1013,16 +1013,25 @@ impl IOCompositor {
continue;
};
let device_pixels_per_page_pixel = webview_renderer.device_pixels_per_page_pixel().0;
let pinch_zoom_transform = webview_renderer.pinch_zoom().transform().to_untyped();
let device_pixels_per_page_pixel_not_including_pinch_zoom = webview_renderer
.device_pixels_per_page_pixel_not_including_pinch_zoom()
.get();
let transform = LayoutTransform::scale(
device_pixels_per_page_pixel_not_including_pinch_zoom,
device_pixels_per_page_pixel_not_including_pinch_zoom,
1.0,
)
.then(&LayoutTransform::from_untyped(
&pinch_zoom_transform.to_3d(),
));
let webview_reference_frame = builder.push_reference_frame(
LayoutPoint::zero(),
root_reference_frame,
TransformStyle::Flat,
PropertyBinding::Value(Transform3D::scale(
device_pixels_per_page_pixel,
device_pixels_per_page_pixel,
1.,
)),
PropertyBinding::Value(transform),
ReferenceFrameKind::Transform {
is_2d_scale_translation: true,
should_snap: true,
@@ -1031,7 +1040,8 @@ impl IOCompositor {
SpatialTreeItemKey::new(0, 0),
);
let scaled_webview_rect = webview_renderer.rect / device_pixels_per_page_pixel;
let scaled_webview_rect = webview_renderer.rect /
webview_renderer.device_pixels_per_page_pixel_not_including_pinch_zoom();
builder.push_iframe(
LayoutRect::from_untyped(&scaled_webview_rect.to_untyped()),
LayoutRect::from_untyped(&scaled_webview_rect.to_untyped()),
@@ -1617,9 +1627,14 @@ impl IOCompositor {
}
}
pub fn pinch_zoom(&mut self, webview_id: WebViewId, pinch_zoom_delta: f32) {
pub fn pinch_zoom(
&mut self,
webview_id: WebViewId,
pinch_zoom_delta: f32,
center: DevicePoint,
) {
if let Some(webview_renderer) = self.webview_renderers.get_mut(webview_id) {
webview_renderer.pinch_zoom(pinch_zoom_delta);
webview_renderer.adjust_pinch_zoom(pinch_zoom_delta, center);
}
}

View File

@@ -22,6 +22,7 @@ pub use crate::compositor::{IOCompositor, WebRenderDebugOption};
mod tracing;
mod compositor;
mod pinch_zoom;
mod refresh_driver;
mod render_notifier;
mod screenshot;

View File

@@ -0,0 +1,147 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use euclid::{Point2D, Rect, Scale, Transform2D, Vector2D};
use webrender_api::ScrollLocation;
use webrender_api::units::{DevicePixel, DevicePoint, DeviceRect, DeviceSize, DeviceVector2D};
/// A [`PinchZoom`] describes the pinch zoom viewport of a `WebView`. This is used to
/// track the current pinch zoom transformation and to clamp all pinching and panning
/// to the unscaled `WebView` viewport.
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) struct PinchZoom {
// The current pinch zoom magnification factor. 1.0 means "not zoomed in";
// values are kept in the range [1.0, 10.0] by `zoom()`.
zoom_factor: f32,
// The transform that maps the unscaled viewport to the currently visible,
// zoomed-and-panned viewport, in device pixels. Identity when not zoomed.
transform: Transform2D<f32, DevicePixel, DevicePixel>,
// The size of the `WebView` viewport before any pinch zoom is applied. All
// pinching and panning is clamped against this rectangle.
unscaled_viewport_size: DeviceSize,
}
impl PinchZoom {
/// Create a new [`PinchZoom`] for a `WebView` with the given rectangle. The
/// initial state is unzoomed: a factor of 1.0 and an identity transform.
pub(crate) fn new(webview_rect: DeviceRect) -> Self {
Self {
zoom_factor: 1.0,
unscaled_viewport_size: webview_rect.size(),
transform: Transform2D::identity(),
}
}
/// The current pinch zoom transform, mapping the unscaled viewport to the
/// zoomed-and-panned one. Identity when no pinch zoom is active.
pub(crate) fn transform(&self) -> Transform2D<f32, DevicePixel, DevicePixel> {
self.transform
}
/// The current pinch zoom magnification as a device-pixel [`Scale`].
pub(crate) fn zoom_factor(&self) -> Scale<f32, DevicePixel, DevicePixel> {
Scale::new(self.zoom_factor)
}
/// Set the pinch zoom transform, clamping it so the visible portion of the
/// viewport stays inside the unscaled `WebView` viewport.
fn set_transform(&mut self, transform: Transform2D<f32, DevicePixel, DevicePixel>) {
let rect = Rect::new(
Point2D::origin(),
self.unscaled_viewport_size.to_vector().to_size(),
)
.cast_unit();
// Invert the transform to find which rectangle of the unscaled viewport
// would be visible under it.
let mut rect = transform
.inverse()
.expect("Should always be able to invert provided transform")
.outer_transformed_rect(&rect);
// Clamp the visible rectangle's origin so it cannot move outside the
// unscaled viewport. NOTE(review): if `rect.size` exceeds the viewport
// size the clamp bounds invert; the commit message notes unresolved
// clamping bugs in this area.
rect.origin = rect.origin.clamp(
Point2D::origin(),
(self.unscaled_viewport_size - rect.size)
.to_vector()
.to_point(),
);
// Rebuild a uniform scale-and-translate transform from the clamped
// rectangle. Using only the width assumes a uniform scale, which is what
// `zoom()` and `pan_with_delta()` always produce.
let scale = self.unscaled_viewport_size.width / rect.width();
self.transform = Transform2D::identity()
.then_translate(Vector2D::new(-rect.origin.x, -rect.origin.y))
.then_scale(scale, scale);
}
/// Magnify the pinch zoom viewport by `magnification`, keeping `new_center`
/// (in device pixels) fixed on screen. The total factor is clamped to
/// [1.0, 10.0]; at a factor of 1.0 the transform resets to identity.
pub(crate) fn zoom(&mut self, magnification: f32, new_center: DevicePoint) {
const MINIMUM_PINCH_ZOOM: f32 = 1.0;
const MAXIMUM_PINCH_ZOOM: f32 = 10.0;
let new_factor =
(self.zoom_factor * magnification).clamp(MINIMUM_PINCH_ZOOM, MAXIMUM_PINCH_ZOOM);
let old_factor = std::mem::replace(&mut self.zoom_factor, new_factor);
// Fully zoomed out: drop any residual pan/scale so rendering is exact.
if self.zoom_factor <= 1.0 {
self.transform = Transform2D::identity();
return;
}
// Use the effective (post-clamp) magnification so the transform stays in
// sync with `zoom_factor` when clamping kicked in.
let magnification = self.zoom_factor / old_factor;
// Scale about `new_center`: translate the center to the origin, scale,
// then translate back.
let transform = self
.transform
.then_translate(Vector2D::new(-new_center.x, -new_center.y))
.then_scale(magnification, magnification)
.then_translate(Vector2D::new(new_center.x, new_center.y));
self.set_transform(transform);
}
/// Pan the pinch zoom viewport by the given [`ScrollLocation`] and if it is a delta,
/// modify the delta to reflect the remaining unused scroll delta.
pub(crate) fn pan(&mut self, scroll_location: &mut ScrollLocation) {
// TODO: The delta passed here in `ScrollLocation` is a LayoutVector2D, but is actually
// in DevicePixels! This should reflect reality.
match scroll_location {
ScrollLocation::Delta(delta) => {
let remaining =
self.pan_with_delta(DeviceScroll::Delta(DeviceVector2D::new(delta.x, delta.y)));
// Hand the unconsumed portion back so the caller can chain it into
// scroll-layer scrolling.
*delta = Vector2D::new(remaining.x, remaining.y)
},
ScrollLocation::Start => {
self.pan_with_delta(DeviceScroll::Start);
},
ScrollLocation::End => {
self.pan_with_delta(DeviceScroll::End);
},
}
}
/// Pan the pinch zoom viewport by the given delta and return the remaining device
/// pixel value that was unused.
fn pan_with_delta(&mut self, scroll: DeviceScroll) -> DeviceVector2D {
let current_viewport = Rect::new(
Point2D::origin(),
self.unscaled_viewport_size.to_vector().to_size(),
);
// The unscaled viewport as seen under the current transform; its origin
// encodes the current pan offset (scaled), its size the zoomed size.
let layout_viewport_in_device_pixels =
self.transform.outer_transformed_rect(&current_viewport);
// The most negative offset the zoomed viewport can take while still
// covering the unscaled viewport (non-positive components when zoomed in).
let max_viewport_offset = -(layout_viewport_in_device_pixels.size -
self.unscaled_viewport_size.to_vector().to_size());
// The largest positive delta that can still be absorbed by panning.
let max_delta = layout_viewport_in_device_pixels.origin - max_viewport_offset;
let delta = match scroll {
DeviceScroll::Delta(delta) => delta,
// `Start`/`End` only pan vertically: jump to the top or bottom edge.
DeviceScroll::Start => DeviceVector2D::new(0.0, max_delta.y),
DeviceScroll::End => {
DeviceVector2D::new(0.0, -layout_viewport_in_device_pixels.origin.y)
},
};
// Compute, per axis and per direction, how much of the delta cannot be
// absorbed by the pinch zoom viewport; this is returned to the caller.
let mut remaining = Vector2D::zero();
if delta.x < 0.0 {
remaining.x = (delta.x - layout_viewport_in_device_pixels.origin.x).min(0.0);
}
if delta.y < 0.0 {
remaining.y = (delta.y - layout_viewport_in_device_pixels.origin.y).min(0.0);
}
if delta.x > 0.0 {
remaining.x = (delta.x - max_delta.x).max(0.0);
}
if delta.y > 0.0 {
remaining.y = (delta.y - max_delta.y).max(0.0);
}
// Apply the full delta; `set_transform` clamps the result to the viewport.
self.set_transform(
self.transform
.then_translate(Vector2D::new(-delta.x, -delta.y)),
);
remaining
}
}
/// A device-pixel analogue of [`ScrollLocation`], used internally by
/// [`PinchZoom::pan_with_delta`].
enum DeviceScroll {
/// Pan by the given device-pixel delta.
Delta(DeviceVector2D),
/// Pan to the start (top) of the viewport; vertical only.
Start,
/// Pan to the end (bottom) of the viewport; vertical only.
End,
}

View File

@@ -10,12 +10,13 @@ use euclid::{Point2D, Scale, Vector2D};
use log::{debug, error, warn};
use rustc_hash::FxHashMap;
use style_traits::CSSPixel;
use webrender_api::ScrollLocation;
use webrender_api::units::{DeviceIntPoint, DevicePixel, DevicePoint, LayoutVector2D};
use self::TouchSequenceState::*;
use crate::IOCompositor;
use crate::refresh_driver::RefreshDriverObserver;
use crate::webview_renderer::WebViewRenderer;
use crate::webview_renderer::{ScrollEvent, ScrollZoomEvent, WebViewRenderer};
/// An ID for a sequence of touch events between a `Down` and the `Up` or `Cancel` event.
/// The ID is the same for all events between `Down` and `Up` or `Cancel`
@@ -106,7 +107,7 @@ pub struct TouchSequenceInfo {
/// this requires some additional work to handle the merging of pending
/// touch move events. Presumably if we keep a history of previous touch points,
/// this would allow a better fling algorithm and easier merging of zoom events.
pending_touch_move_action: Option<TouchMoveAction>,
pending_touch_move_actions: Vec<ScrollZoomEvent>,
/// Cache for the last touch hit test result.
hit_test_result_cache: Option<HitTestResultCache>,
}
@@ -126,40 +127,9 @@ impl TouchSequenceInfo {
(distance, center)
}
fn update_pending_touch_move_action(&mut self, action: TouchMoveAction) {
fn add_pending_touch_move_action(&mut self, action: ScrollZoomEvent) {
debug_assert!(self.prevent_move == TouchMoveAllowed::Pending);
if let Some(pre_action) = self.pending_touch_move_action {
let combine_action = match (pre_action, action) {
(TouchMoveAction::NoAction, _) | (_, TouchMoveAction::NoAction) => action,
// Combine touch move action.
(TouchMoveAction::Scroll(delta, point), TouchMoveAction::Scroll(delta_new, _)) => {
TouchMoveAction::Scroll(delta + delta_new, point)
},
(
TouchMoveAction::Scroll(delta, _),
TouchMoveAction::Zoom(magnification, scroll_delta),
) |
(
TouchMoveAction::Zoom(magnification, scroll_delta),
TouchMoveAction::Scroll(delta, _),
) => {
// Todo: It's unclear what the best action would be. Should we keep both
// scroll and zoom?
TouchMoveAction::Zoom(magnification, delta + scroll_delta)
},
(
TouchMoveAction::Zoom(magnification, scroll_delta),
TouchMoveAction::Zoom(magnification_new, scroll_delta_new),
) => TouchMoveAction::Zoom(
magnification * magnification_new,
scroll_delta + scroll_delta_new,
),
};
self.pending_touch_move_action = Some(combine_action);
} else {
self.pending_touch_move_action = Some(action);
}
self.pending_touch_move_actions.push(action);
}
/// Returns true when all touch events of a sequence have been received.
@@ -183,16 +153,7 @@ impl TouchSequenceInfo {
/// An action that can be immediately performed in response to a touch move event
/// without waiting for script.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TouchMoveAction {
/// Scroll by the provided offset.
Scroll(Vector2D<f32, DevicePixel>, DevicePoint),
/// Zoom by a magnification factor and scroll by the provided offset.
Zoom(f32, Vector2D<f32, DevicePixel>),
/// Don't do anything.
NoAction,
}
#[derive(Clone, Copy, Debug)]
pub struct TouchPoint {
pub id: TouchId,
pub point: Point2D<f32, DevicePixel>,
@@ -249,7 +210,7 @@ impl TouchHandler {
handling_touch_move: false,
prevent_click: false,
prevent_move: TouchMoveAllowed::Pending,
pending_touch_move_action: None,
pending_touch_move_actions: vec![],
hit_test_result_cache: None,
};
// We insert a simulated initial touch sequence, which is already finished,
@@ -310,26 +271,26 @@ impl TouchHandler {
}
}
pub(crate) fn pending_touch_move_action(
pub(crate) fn take_pending_touch_move_actions(
&mut self,
sequence_id: TouchSequenceId,
) -> Option<TouchMoveAction> {
match self.touch_sequence_map.get(&sequence_id) {
Some(sequence) => sequence.pending_touch_move_action,
None => None,
}
) -> Vec<ScrollZoomEvent> {
self.touch_sequence_map
.get_mut(&sequence_id)
.map(|sequence| std::mem::take(&mut sequence.pending_touch_move_actions))
.unwrap_or_default()
}
pub(crate) fn remove_pending_touch_move_action(&mut self, sequence_id: TouchSequenceId) {
pub(crate) fn remove_pending_touch_move_actions(&mut self, sequence_id: TouchSequenceId) {
if let Some(sequence) = self.touch_sequence_map.get_mut(&sequence_id) {
sequence.pending_touch_move_action = None;
sequence.pending_touch_move_actions = Vec::new();
}
}
// try to remove touch sequence, if touch sequence end and not has pending action.
pub(crate) fn try_remove_touch_sequence(&mut self, sequence_id: TouchSequenceId) {
if let Some(sequence) = self.touch_sequence_map.get(&sequence_id) {
if sequence.pending_touch_move_action.is_none() && sequence.state == Finished {
if sequence.pending_touch_move_actions.is_empty() && sequence.state == Finished {
self.touch_sequence_map.remove(&sequence_id);
}
}
@@ -381,7 +342,7 @@ impl TouchHandler {
handling_touch_move: false,
prevent_click: false,
prevent_move: TouchMoveAllowed::Pending,
pending_touch_move_action: None,
pending_touch_move_actions: vec![],
hit_test_result_cache: None,
},
);
@@ -432,13 +393,11 @@ impl TouchHandler {
&mut self,
id: TouchId,
point: Point2D<f32, DevicePixel>,
) -> TouchMoveAction {
) -> Option<ScrollZoomEvent> {
// As `TouchHandler` is per `WebViewRenderer` which is per `WebView` we might get a Touch Sequence Move that
// started with a down on a different webview. As the touch_sequence id is only changed on touch_down this
// move event gets a touch id which is already cleaned up.
let Some(touch_sequence) = self.try_get_current_touch_sequence_mut() else {
return TouchMoveAction::NoAction;
};
let touch_sequence = self.try_get_current_touch_sequence_mut()?;
let idx = match touch_sequence
.active_touch_points
.iter_mut()
@@ -447,7 +406,7 @@ impl TouchHandler {
Some(i) => i,
None => {
error!("Got a touchmove event for a non-active touch point");
return TouchMoveAction::NoAction;
return None;
},
};
let old_point = touch_sequence.active_touch_points[idx].point;
@@ -464,7 +423,11 @@ impl TouchHandler {
touch_sequence.active_touch_points[idx].point = point;
// Scroll offsets are opposite to the direction of finger motion.
TouchMoveAction::Scroll(-delta, point)
Some(ScrollZoomEvent::Scroll(ScrollEvent {
scroll_location: ScrollLocation::Delta(-delta.cast_unit()),
cursor: point.to_i32(),
event_count: 1,
}))
} else if delta.x.abs() > TOUCH_PAN_MIN_SCREEN_PX ||
delta.y.abs() > TOUCH_PAN_MIN_SCREEN_PX
{
@@ -477,11 +440,15 @@ impl TouchHandler {
touch_sequence.active_touch_points[idx].point = point;
// Scroll offsets are opposite to the direction of finger motion.
TouchMoveAction::Scroll(-delta, point)
Some(ScrollZoomEvent::Scroll(ScrollEvent {
scroll_location: ScrollLocation::Delta(-delta.cast_unit()),
cursor: point.to_i32(),
event_count: 1,
}))
} else {
// We don't update the touchpoint, so multiple small moves can
// accumulate and merge into a larger move.
TouchMoveAction::NoAction
None
}
},
2 => {
@@ -490,34 +457,31 @@ impl TouchHandler {
delta.y.abs() > TOUCH_PAN_MIN_SCREEN_PX
{
touch_sequence.state = Pinching;
let (d0, c0) = touch_sequence.pinch_distance_and_center();
let (d0, _) = touch_sequence.pinch_distance_and_center();
// update the touch point with the enough distance or pinching.
touch_sequence.active_touch_points[idx].point = point;
let (d1, c1) = touch_sequence.pinch_distance_and_center();
let magnification = d1 / d0;
let scroll_delta = c1 - c0 * Scale::new(magnification);
// Scroll offsets are opposite to the direction of finger motion.
TouchMoveAction::Zoom(magnification, -scroll_delta)
Some(ScrollZoomEvent::PinchZoom(d1 / d0, c1))
} else {
// We don't update the touchpoint, so multiple small moves can
// accumulate and merge into a larger move.
TouchMoveAction::NoAction
None
}
},
_ => {
touch_sequence.active_touch_points[idx].point = point;
touch_sequence.state = MultiTouch;
TouchMoveAction::NoAction
None
},
};
// If the touch action is not `NoAction` and the first move has not been processed,
// set pending_touch_move_action.
if TouchMoveAction::NoAction != action &&
touch_sequence.prevent_move == TouchMoveAllowed::Pending
{
touch_sequence.update_pending_touch_move_action(action);
if let Some(action) = action {
if touch_sequence.prevent_move == TouchMoveAllowed::Pending {
touch_sequence.add_pending_touch_move_action(action);
}
}
action

View File

@@ -18,35 +18,37 @@ use embedder_traits::{
InputEventResult, MouseButton, MouseButtonAction, MouseButtonEvent, MouseMoveEvent,
ScrollEvent as EmbedderScrollEvent, ShutdownState, TouchEvent, TouchEventType, ViewportDetails,
};
use euclid::{Point2D, Scale, Vector2D};
use euclid::{Scale, Vector2D};
use log::{debug, warn};
use malloc_size_of::MallocSizeOf;
use rustc_hash::FxHashMap;
use servo_geometry::DeviceIndependentPixel;
use style_traits::{CSSPixel, PinchZoomFactor};
use style_traits::CSSPixel;
use webrender_api::units::{DeviceIntPoint, DevicePixel, DevicePoint, DeviceRect, LayoutVector2D};
use webrender_api::{ExternalScrollId, ScrollLocation};
use crate::compositor::{PipelineDetails, ServoRenderer};
use crate::pinch_zoom::PinchZoom;
use crate::touch::{
FlingRefreshDriverObserver, PendingTouchInputEvent, TouchHandler, TouchMoveAction,
TouchMoveAllowed, TouchSequenceState,
FlingRefreshDriverObserver, PendingTouchInputEvent, TouchHandler, TouchMoveAllowed,
TouchSequenceState,
};
#[derive(Clone, Copy)]
struct ScrollEvent {
pub(crate) struct ScrollEvent {
/// Scroll by this offset, or to Start or End
scroll_location: ScrollLocation,
pub scroll_location: ScrollLocation,
/// Apply changes to the frame at this location
cursor: DeviceIntPoint,
pub cursor: DeviceIntPoint,
/// The number of OS events that have been coalesced together into this one event.
event_count: u32,
pub event_count: u32,
}
#[derive(Clone, Copy)]
enum ScrollZoomEvent {
/// A pinch zoom event that magnifies the view by the given factor.
PinchZoom(f32),
pub(crate) enum ScrollZoomEvent {
/// A pinch zoom event that magnifies the view by the given factor from the given
/// center point.
PinchZoom(f32, DevicePoint),
/// A scroll event that scrolls the scroll node at the given location by the
/// given amount.
Scroll(ScrollEvent),
@@ -89,8 +91,9 @@ pub(crate) struct WebViewRenderer {
touch_handler: TouchHandler,
/// "Desktop-style" zoom that resizes the viewport to fit the window.
pub page_zoom: Scale<f32, CSSPixel, DeviceIndependentPixel>,
/// "Mobile-style" zoom that does not reflow the page.
pinch_zoom: PinchZoomFactor,
/// "Mobile-style" zoom that does not reflow the page. When there is no [`PinchZoom`] a
/// zoom factor of 1.0 is implied and the [`PinchZoom::transform`] will be the identity.
pinch_zoom: PinchZoom,
/// The HiDPI scale factor for the `WebView` associated with this renderer. This is controlled
/// by the embedding layer.
hidpi_scale_factor: Scale<f32, DeviceIndependentPixel, DevicePixel>,
@@ -110,17 +113,18 @@ impl WebViewRenderer {
) -> Self {
let hidpi_scale_factor = viewport_details.hidpi_scale_factor;
let size = viewport_details.size * viewport_details.hidpi_scale_factor;
let rect = DeviceRect::from_origin_and_size(DevicePoint::origin(), size);
Self {
id: renderer_webview.id(),
webview: renderer_webview,
root_pipeline_id: None,
rect: DeviceRect::from_origin_and_size(DevicePoint::origin(), size),
rect,
pipelines: Default::default(),
touch_handler: TouchHandler::new(),
global,
pending_scroll_zoom_events: Default::default(),
page_zoom: DEFAULT_PAGE_ZOOM,
pinch_zoom: PinchZoomFactor::new(1.0),
pinch_zoom: PinchZoom::new(rect),
hidpi_scale_factor: Scale::new(hidpi_scale_factor.0),
animating: false,
viewport_description: None,
@@ -386,8 +390,8 @@ impl WebViewRenderer {
}
fn on_touch_move(&mut self, mut event: TouchEvent, id: InputEventId) {
let action: TouchMoveAction = self.touch_handler.on_touch_move(event.id, event.point);
if TouchMoveAction::NoAction != action {
let action = self.touch_handler.on_touch_move(event.id, event.point);
if let Some(action) = action {
// if first move processed and allowed, we directly process the move event,
// without waiting for the script handler.
if self
@@ -396,30 +400,7 @@ impl WebViewRenderer {
{
// https://w3c.github.io/touch-events/#cancelability
event.disable_cancelable();
match action {
TouchMoveAction::Scroll(delta, point) => self.on_scroll_window_event(
ScrollLocation::Delta(LayoutVector2D::from_untyped(delta.to_untyped())),
point.cast(),
),
TouchMoveAction::Zoom(zoom_delta, scroll_delta) => {
let cursor = Point2D::new(-1, -1); // Make sure this hits the base layer.
// The order of these events doesn't matter, because zoom is handled by
// a root display list and the scroll event here is handled by the scroll
// applied to the content display list.
self.pending_scroll_zoom_events
.push(ScrollZoomEvent::PinchZoom(zoom_delta));
self.pending_scroll_zoom_events
.push(ScrollZoomEvent::Scroll(ScrollEvent {
scroll_location: ScrollLocation::Delta(
LayoutVector2D::from_untyped(scroll_delta.to_untyped()),
),
cursor,
event_count: 1,
}));
},
_ => {},
}
self.pending_scroll_zoom_events.push(action);
}
// When the event is touchmove, if the script thread is processing the touch
// move event, we skip sending the event to the script thread.
@@ -468,7 +449,7 @@ impl WebViewRenderer {
self.touch_handler.prevent_click(sequence_id);
self.touch_handler.prevent_move(sequence_id);
self.touch_handler
.remove_pending_touch_move_action(sequence_id);
.remove_pending_touch_move_actions(sequence_id);
},
TouchEventType::Move => {
// script thread processed the touch move event, mark this false.
@@ -480,7 +461,7 @@ impl WebViewRenderer {
self.touch_handler
.set_handling_touch_move(self.touch_handler.current_sequence_id, false);
self.touch_handler
.remove_pending_touch_move_action(sequence_id);
.remove_pending_touch_move_actions(sequence_id);
}
},
TouchEventType::Up => {
@@ -522,7 +503,7 @@ impl WebViewRenderer {
// We could still have pending event handlers, so we remove the pending
// actions, and try to remove the touch sequence.
self.touch_handler
.remove_pending_touch_move_action(sequence_id);
.remove_pending_touch_move_actions(sequence_id);
self.touch_handler.try_remove_touch_sequence(sequence_id);
},
}
@@ -534,39 +515,10 @@ impl WebViewRenderer {
match event_type {
TouchEventType::Down => {},
TouchEventType::Move => {
if let Some(action) = self.touch_handler.pending_touch_move_action(sequence_id)
{
match action {
TouchMoveAction::Scroll(delta, point) => self.on_scroll_window_event(
ScrollLocation::Delta(LayoutVector2D::from_untyped(
delta.to_untyped(),
)),
point.cast(),
),
TouchMoveAction::Zoom(zoom_delta, scroll_delta) => {
let cursor = Point2D::new(-1, -1);
// Make sure this hits the base layer.
// The order of these events doesn't matter, because zoom is handled by
// a root display list and the scroll event here is handled by the scroll
// applied to the content display list.
self.pending_scroll_zoom_events
.push(ScrollZoomEvent::PinchZoom(zoom_delta));
self.pending_scroll_zoom_events
.push(ScrollZoomEvent::Scroll(ScrollEvent {
scroll_location: ScrollLocation::Delta(
LayoutVector2D::from_untyped(scroll_delta.to_untyped()),
),
cursor,
event_count: 1,
}));
},
TouchMoveAction::NoAction => {
// This shouldn't happen, but we can also just ignore it.
},
}
self.pending_scroll_zoom_events.extend(
self.touch_handler
.remove_pending_touch_move_action(sequence_id);
}
.take_pending_touch_move_actions(sequence_id),
);
self.touch_handler
.set_handling_touch_move(self.touch_handler.current_sequence_id, false);
if let Some(info) = self.touch_handler.get_touch_sequence_mut(sequence_id) {
@@ -616,7 +568,7 @@ impl WebViewRenderer {
},
TouchEventType::Cancel => {
self.touch_handler
.remove_pending_touch_move_action(sequence_id);
.remove_pending_touch_move_actions(sequence_id);
self.touch_handler.try_remove_touch_sequence(sequence_id);
},
}
@@ -686,14 +638,15 @@ impl WebViewRenderer {
return (PinchZoomResult::DidNotPinchZoom, None);
}
// Batch up all scroll events into one, or else we'll do way too much painting.
// Batch up all scroll events and changes to pinch zoom into a single change, or
// else we'll do way too much painting.
let mut combined_scroll_event: Option<ScrollEvent> = None;
let current_pinch_zoom = self.pinch_zoom_level().get();
let mut combined_pinch_zoom_delta = 1.0;
let mut new_pinch_zoom = self.pinch_zoom;
for scroll_event in self.pending_scroll_zoom_events.drain(..) {
match scroll_event {
ScrollZoomEvent::PinchZoom(pinch_zoom_delta) => {
combined_pinch_zoom_delta *= pinch_zoom_delta
ScrollZoomEvent::PinchZoom(factor, center) => {
new_pinch_zoom.zoom(factor, center);
},
ScrollZoomEvent::Scroll(scroll_event_info) => {
let combined_event = match combined_scroll_event.as_mut() {
@@ -716,9 +669,8 @@ impl WebViewRenderer {
let old_event_count = Scale::new(combined_event.event_count as f32);
combined_event.event_count += 1;
let new_event_count = Scale::new(combined_event.event_count as f32);
combined_event.scroll_location = ScrollLocation::Delta(
(old_delta * old_event_count + new_delta) / new_event_count,
);
let delta = (old_delta * old_event_count + new_delta) / new_event_count;
combined_event.scroll_location = ScrollLocation::Delta(delta);
},
(ScrollLocation::Start, _) | (ScrollLocation::End, _) => {
// Once we see Start or End, we shouldn't process any more events.
@@ -735,6 +687,13 @@ impl WebViewRenderer {
}
}
// When zoomed in via pinch zoom, first try to move the center of the zoom and use the rest
// of the delta for scrolling. This allows moving the zoomed into viewport around in the
// unzoomed viewport before actually scrolling the underlying layers.
if let Some(combined_scroll_event) = combined_scroll_event.as_mut() {
new_pinch_zoom.pan(&mut combined_scroll_event.scroll_location)
}
let scroll_result = combined_scroll_event.and_then(|combined_event| {
self.scroll_node_at_device_point(
combined_event.cursor.to_f32(),
@@ -751,10 +710,7 @@ impl WebViewRenderer {
);
}
let pinch_zoom_result =
self.set_pinch_zoom_level(current_pinch_zoom * combined_pinch_zoom_delta);
(pinch_zoom_result, scroll_result)
(self.set_pinch_zoom(new_pinch_zoom), scroll_result)
}
/// Perform a hit test at the given [`DevicePoint`] and apply the [`ScrollLocation`]
@@ -836,23 +792,17 @@ impl WebViewRenderer {
}
}
pub(crate) fn pinch_zoom_level(&self) -> Scale<f32, DevicePixel, DevicePixel> {
Scale::new(self.pinch_zoom.get())
pub(crate) fn pinch_zoom(&self) -> PinchZoom {
self.pinch_zoom
}
fn set_pinch_zoom_level(&mut self, requested_pinch_zoom: f32) -> PinchZoomResult {
const MINIMUM_PINCH_ZOOM: f32 = 1.0;
const MAXIMUM_PINCH_ZOOM: f32 = 10.0;
let new_pinch_zoom = requested_pinch_zoom.clamp(MINIMUM_PINCH_ZOOM, MAXIMUM_PINCH_ZOOM);
let old_zoom =
std::mem::replace(&mut self.pinch_zoom, PinchZoomFactor::new(new_pinch_zoom));
if old_zoom != self.pinch_zoom {
PinchZoomResult::DidPinchZoom
} else {
PinchZoomResult::DidNotPinchZoom
fn set_pinch_zoom(&mut self, requested_pinch_zoom: PinchZoom) -> PinchZoomResult {
if requested_pinch_zoom == self.pinch_zoom {
return PinchZoomResult::DidNotPinchZoom;
}
self.pinch_zoom = requested_pinch_zoom;
PinchZoomResult::DidPinchZoom
}
pub(crate) fn set_page_zoom(
@@ -877,7 +827,7 @@ impl WebViewRenderer {
.and_then(|pipeline_id| self.pipelines.get(&pipeline_id))
.and_then(|pipeline| pipeline.viewport_scale)
.unwrap_or_else(|| self.page_zoom * self.hidpi_scale_factor);
viewport_scale * self.pinch_zoom_level()
viewport_scale * self.pinch_zoom.zoom_factor()
}
/// The current viewport scale (hidpi scale and page zoom and not pinch
@@ -891,14 +841,16 @@ impl WebViewRenderer {
}
/// Adjust the pinch zoom of the [`WebView`] by the given zoom delta.
pub(crate) fn pinch_zoom(&mut self, pinch_zoom_delta: f32) {
pub(crate) fn adjust_pinch_zoom(&mut self, magnification: f32, center: DevicePoint) {
if self.global.borrow().shutdown_state() != ShutdownState::NotShuttingDown {
return;
}
if magnification == 1.0 {
return;
}
// TODO: Scroll to keep the center in view?
self.pending_scroll_zoom_events
.push(ScrollZoomEvent::PinchZoom(pinch_zoom_delta));
.push(ScrollZoomEvent::PinchZoom(magnification, center));
}
fn send_window_size_message(&self) {

View File

@@ -24,7 +24,7 @@ use servo_geometry::DeviceIndependentPixel;
use style_traits::CSSPixel;
use url::Url;
use webrender_api::ScrollLocation;
use webrender_api::units::{DeviceIntPoint, DevicePixel, DeviceRect};
use webrender_api::units::{DeviceIntPoint, DevicePixel, DevicePoint, DeviceRect};
use crate::clipboard_delegate::{ClipboardDelegate, DefaultClipboardDelegate};
use crate::javascript_evaluator::JavaScriptEvaluator;
@@ -547,11 +547,11 @@ impl WebView {
///
/// The final pinch zoom values will be clamped to reasonable defaults (currently to
/// the inclusive range [1.0, 10.0]).
pub fn pinch_zoom(&self, pinch_zoom_delta: f32) {
pub fn pinch_zoom(&self, pinch_zoom_delta: f32, center: DevicePoint) {
self.inner()
.compositor
.borrow_mut()
.pinch_zoom(self.id(), pinch_zoom_delta);
.pinch_zoom(self.id(), pinch_zoom_delta, center);
}
pub fn device_pixels_per_css_pixel(&self) -> Scale<f32, CSSPixel, DevicePixel> {

View File

@@ -758,7 +758,7 @@ impl WindowPortsMethods for Window {
)));
},
WindowEvent::PinchGesture { delta, .. } => {
webview.pinch_zoom(delta as f32 + 1.0);
webview.pinch_zoom(delta as f32 + 1.0, self.webview_relative_mouse_point.get());
},
WindowEvent::CloseRequested => {
state.servo().start_shutting_down();

View File

@@ -401,11 +401,11 @@ pub extern "C" fn Java_org_servo_servoview_JNIServo_pinchZoomStart<'local>(
mut env: JNIEnv<'local>,
_: JClass<'local>,
factor: jfloat,
x: jint,
y: jint,
x: jfloat,
y: jfloat,
) {
debug!("pinchZoomStart");
call(&mut env, |s| s.pinchzoom_start(factor, x as u32, y as u32));
call(&mut env, |s| s.pinchzoom_start(factor, x, y));
}
#[unsafe(no_mangle)]
@@ -413,11 +413,11 @@ pub extern "C" fn Java_org_servo_servoview_JNIServo_pinchZoom<'local>(
mut env: JNIEnv<'local>,
_: JClass<'local>,
factor: jfloat,
x: jint,
y: jint,
x: jfloat,
y: jfloat,
) {
debug!("pinchZoom");
call(&mut env, |s| s.pinchzoom(factor, x as u32, y as u32));
call(&mut env, |s| s.pinchzoom(factor, x, y));
}
#[unsafe(no_mangle)]
@@ -425,11 +425,11 @@ pub extern "C" fn Java_org_servo_servoview_JNIServo_pinchZoomEnd<'local>(
mut env: JNIEnv<'local>,
_: JClass<'local>,
factor: jfloat,
x: jint,
y: jint,
x: jfloat,
y: jfloat,
) {
debug!("pinchZoomEnd");
call(&mut env, |s| s.pinchzoom_end(factor, x as u32, y as u32));
call(&mut env, |s| s.pinchzoom_end(factor, x, y));
}
#[unsafe(no_mangle)]

View File

@@ -17,7 +17,7 @@ use servo::base::id::WebViewId;
use servo::ipc_channel::ipc::IpcSender;
use servo::servo_geometry::DeviceIndependentPixel;
use servo::webrender_api::ScrollLocation;
use servo::webrender_api::units::{DeviceIntRect, DeviceIntSize, DevicePixel};
use servo::webrender_api::units::{DeviceIntRect, DeviceIntSize, DevicePixel, DevicePoint};
use servo::{
AllowOrDenyRequest, ContextMenuResult, ImeEvent, InputEvent, InputMethodType, KeyboardEvent,
LoadStatus, MediaSessionActionType, MediaSessionEvent, MouseButton, MouseButtonAction,
@@ -852,22 +852,25 @@ impl RunningAppState {
/// Start pinchzoom.
/// x/y are pinch origin coordinates.
pub fn pinchzoom_start(&self, factor: f32, _x: u32, _y: u32) {
self.active_webview().pinch_zoom(factor);
pub fn pinchzoom_start(&self, factor: f32, x: f32, y: f32) {
self.active_webview()
.pinch_zoom(factor, DevicePoint::new(x, y));
self.perform_updates();
}
/// Pinchzoom.
/// x/y are pinch origin coordinates.
pub fn pinchzoom(&self, factor: f32, _x: u32, _y: u32) {
self.active_webview().pinch_zoom(factor);
pub fn pinchzoom(&self, factor: f32, x: f32, y: f32) {
self.active_webview()
.pinch_zoom(factor, DevicePoint::new(x, y));
self.perform_updates();
}
/// End pinchzoom.
/// x/y are pinch origin coordinates.
pub fn pinchzoom_end(&self, factor: f32, _x: u32, _y: u32) {
self.active_webview().pinch_zoom(factor);
pub fn pinchzoom_end(&self, factor: f32, x: f32, y: f32) {
self.active_webview()
.pinch_zoom(factor, DevicePoint::new(x, y));
self.perform_updates();
}

View File

@@ -54,11 +54,11 @@ public class JNIServo {
public native void touchCancel(float x, float y, int pointer_id);
public native void pinchZoomStart(float factor, int x, int y);
public native void pinchZoomStart(float factor, float x, float y);
public native void pinchZoom(float factor, int x, int y);
public native void pinchZoom(float factor, float x, float y);
public native void pinchZoomEnd(float factor, int x, int y);
public native void pinchZoomEnd(float factor, float x, float y);
public native void click(float x, float y);
@@ -122,4 +122,3 @@ public class JNIServo {
void onMediaSessionSetPositionState(float duration, float position, float playbackRate);
}
}

View File

@@ -136,15 +136,15 @@ public class Servo {
mRunCallback.inGLThread(() -> mJNI.touchCancel(x, y, pointerId));
}
public void pinchZoomStart(float factor, int x, int y) {
public void pinchZoomStart(float factor, float x, float y) {
mRunCallback.inGLThread(() -> mJNI.pinchZoomStart(factor, x, y));
}
public void pinchZoom(float factor, int x, int y) {
public void pinchZoom(float factor, float x, float y) {
mRunCallback.inGLThread(() -> mJNI.pinchZoom(factor, x, y));
}
public void pinchZoomEnd(float factor, int x, int y) {
public void pinchZoomEnd(float factor, float x, float y) {
mRunCallback.inGLThread(() -> mJNI.pinchZoomEnd(factor, x, y));
}

View File

@@ -55,6 +55,8 @@ public class ServoView extends SurfaceView
private int mCurX = 0;
private int mLastY = 0;
private int mCurY = 0;
private float mFocusX = 0;
private float mFocusY = 0;
private boolean mFlinging;
private ScaleGestureDetector mScaleGestureDetector;
private OverScroller mScroller;
@@ -179,7 +181,7 @@ public class ServoView extends SurfaceView
}
if (zoomNecessary) {
mServo.pinchZoom(mZoomFactor, 0, 0);
mServo.pinchZoom(mZoomFactor, mFocusX, mFocusY);
mZoomFactor = 1;
}
@@ -332,8 +334,10 @@ public class ServoView extends SurfaceView
public boolean onScaleBegin(ScaleGestureDetector detector) {
if (mScroller.isFinished()) {
mZoomFactor = detector.getScaleFactor();
mFocusX = detector.getFocusX();
mFocusY = detector.getFocusY();
mZooming = true;
mServo.pinchZoomStart(mZoomFactor, 0, 0);
mServo.pinchZoomStart(mZoomFactor, mFocusX, mFocusY);
startLooping();
return true;
} else {
@@ -344,14 +348,18 @@ public class ServoView extends SurfaceView
@Override
public boolean onScale(ScaleGestureDetector detector) {
mZoomFactor *= detector.getScaleFactor();
mFocusX = detector.getFocusX();
mFocusY = detector.getFocusY();
return true;
}
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
mZoomFactor = detector.getScaleFactor();
mFocusX = detector.getFocusX();
mFocusY = detector.getFocusY();
mZooming = false;
mServo.pinchZoomEnd(mZoomFactor, 0, 0);
mServo.pinchZoomEnd(mZoomFactor, mFocusX, mFocusY);
}
private void initGestures(Context context) {