libservo: Move size handling to RenderingContext from WindowMethods (#35621)

This is the first step toward removing `WindowMethods`, which will
gradually be integrated into the `WebView` and `WebViewDelegate`. Sizing
of the `WebView` is now handled by a size associated with its
`RenderingContext`. `WebView`s will eventually just paint the entire
size of their `RenderingContext`. Notes:

- This is a transitional step, so for now there are both a
  `WebView::resize` and a `WebView::move_resize` (a short sketch of the
  new call follows this list). The first is the future API, which
  resizes the `WebView` and its associated `RenderingContext`. The
  second is used by the virtual `WebView`s that will soon be replaced
  by the one-`WebView`-per-`RenderingContext` model.
- We no longer need to call `WebView::move_resize` as much, because the
  default size of a `WebView` is now to take up the whole
  `RenderingContext`.
- `SurfmanRenderingContext` is no longer exposed in the API, as a
  surfman context doesn't naturally have a size unless a surface is
  bound to it.
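
As a quick illustration of the embedder-facing change, here is a minimal,
hedged sketch of reacting to a surface size change. `servo::WebView` and
`dpi::PhysicalSize` are the types used in this diff; the helper function
itself is hypothetical.

```rust
use dpi::PhysicalSize;

// Hypothetical embedder helper; only `WebView::resize` comes from this change.
fn on_surface_resized(webview: &servo::WebView, new_size: PhysicalSize<u32>) {
    // `WebView::resize` replaces `notify_rendering_context_resized`: it hands
    // the new size to the compositor, which resizes the associated
    // `RenderingContext`, updates the WebRender document view, and schedules
    // a repaint (see `IOCompositor::resize_rendering_context` below).
    webview.resize(new_size);
}
```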

Signed-off-by: Martin Robinson <mrobinson@igalia.com>
Martin Robinson 2025-02-25 16:03:53 +01:00 committed by GitHub
parent ebb19bcd60
commit 23524a5413
19 changed files with 200 additions and 312 deletions

Cargo.lock (generated)
View file

@ -1038,6 +1038,7 @@ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"compositing_traits", "compositing_traits",
"crossbeam-channel", "crossbeam-channel",
"dpi",
"embedder_traits", "embedder_traits",
"euclid", "euclid",
"fnv", "fnv",
@ -4342,6 +4343,7 @@ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"devtools", "devtools",
"devtools_traits", "devtools_traits",
"dpi",
"embedder_traits", "embedder_traits",
"env_logger 0.10.2", "env_logger 0.10.2",
"euclid", "euclid",

View file

@ -22,6 +22,7 @@ base = { workspace = true }
bitflags = { workspace = true } bitflags = { workspace = true }
compositing_traits = { workspace = true } compositing_traits = { workspace = true }
crossbeam-channel = { workspace = true } crossbeam-channel = { workspace = true }
dpi = { workspace = true }
embedder_traits = { workspace = true } embedder_traits = { workspace = true }
euclid = { workspace = true } euclid = { workspace = true }
fnv = { workspace = true } fnv = { workspace = true }

View file

@ -20,11 +20,12 @@ use compositing_traits::{
CompositionPipeline, CompositorMsg, CompositorReceiver, ConstellationMsg, SendableFrameTree, CompositionPipeline, CompositorMsg, CompositorReceiver, ConstellationMsg, SendableFrameTree,
}; };
use crossbeam_channel::Sender; use crossbeam_channel::Sender;
use dpi::PhysicalSize;
use embedder_traits::{ use embedder_traits::{
Cursor, InputEvent, MouseButton, MouseButtonAction, MouseButtonEvent, MouseMoveEvent, Cursor, InputEvent, MouseButton, MouseButtonAction, MouseButtonEvent, MouseMoveEvent,
ShutdownState, TouchEvent, TouchEventType, TouchId, ShutdownState, TouchEvent, TouchEventType, TouchId,
}; };
use euclid::{Point2D, Rect, Scale, Size2D, Transform3D, Vector2D}; use euclid::{Box2D, Point2D, Rect, Scale, Size2D, Transform3D, Vector2D};
use fnv::{FnvHashMap, FnvHashSet}; use fnv::{FnvHashMap, FnvHashSet};
use ipc_channel::ipc::{self, IpcSharedMemory}; use ipc_channel::ipc::{self, IpcSharedMemory};
use libc::c_void; use libc::c_void;
@ -41,7 +42,7 @@ use servo_geometry::DeviceIndependentPixel;
use style_traits::{CSSPixel, PinchZoomFactor}; use style_traits::{CSSPixel, PinchZoomFactor};
use webrender::{CaptureBits, RenderApi, Transaction}; use webrender::{CaptureBits, RenderApi, Transaction};
use webrender_api::units::{ use webrender_api::units::{
DeviceIntPoint, DeviceIntSize, DevicePixel, DevicePoint, DeviceRect, LayoutPoint, LayoutRect, DeviceIntPoint, DeviceIntRect, DevicePixel, DevicePoint, DeviceRect, LayoutPoint, LayoutRect,
LayoutSize, LayoutVector2D, WorldPoint, LayoutSize, LayoutVector2D, WorldPoint,
}; };
use webrender_api::{ use webrender_api::{
@ -946,10 +947,11 @@ impl IOCompositor {
); );
let scaled_viewport_size = let scaled_viewport_size =
self.embedder_coordinates.get_viewport().size().to_f32() / zoom_factor; self.rendering_context.size2d().to_f32().to_untyped() / zoom_factor;
let scaled_viewport_size = LayoutSize::from_untyped(scaled_viewport_size.to_untyped()); let scaled_viewport_rect = LayoutRect::from_origin_and_size(
let scaled_viewport_rect = LayoutPoint::zero(),
LayoutRect::from_origin_and_size(LayoutPoint::zero(), scaled_viewport_size); LayoutSize::from_untyped(scaled_viewport_size),
);
let root_clip_id = builder.define_clip_rect(zoom_reference_frame, scaled_viewport_rect); let root_clip_id = builder.define_clip_rect(zoom_reference_frame, scaled_viewport_rect);
let clip_chain_id = builder.define_clip_chain(None, [root_clip_id]); let clip_chain_id = builder.define_clip_chain(None, [root_clip_id]);
@ -1025,11 +1027,12 @@ impl IOCompositor {
"{:?}: Creating new webview with pipeline {:?}", "{:?}: Creating new webview with pipeline {:?}",
top_level_browsing_context_id, pipeline_id top_level_browsing_context_id, pipeline_id
); );
let size = self.rendering_context.size2d().to_f32();
if let Err(WebViewAlreadyExists(webview_id)) = self.global.webviews.add( if let Err(WebViewAlreadyExists(webview_id)) = self.global.webviews.add(
top_level_browsing_context_id, top_level_browsing_context_id,
WebView { WebView {
pipeline_id, pipeline_id,
rect: self.embedder_coordinates.get_viewport().to_f32(), rect: Box2D::from_origin_and_size(Point2D::origin(), size),
}, },
) { ) {
error!("{webview_id}: Creating webview that already exists"); error!("{webview_id}: Creating webview that already exists");
@ -1235,31 +1238,31 @@ impl IOCompositor {
self.embedder_coordinates = self.window.get_coordinates(); self.embedder_coordinates = self.window.get_coordinates();
} }
pub fn on_rendering_context_resized(&mut self) -> bool { pub fn resize_rendering_context(&mut self, new_size: PhysicalSize<u32>) -> bool {
if self.shutdown_state() != ShutdownState::NotShuttingDown { if self.shutdown_state() != ShutdownState::NotShuttingDown {
return false; return false;
} }
let old_coords = self.embedder_coordinates; let old_hidpi_factor = self.embedder_coordinates.hidpi_factor;
self.embedder_coordinates = self.window.get_coordinates(); self.embedder_coordinates = self.window.get_coordinates();
if self.embedder_coordinates.hidpi_factor == old_hidpi_factor &&
if self.embedder_coordinates.viewport != old_coords.viewport { self.rendering_context.size() == new_size
let mut transaction = Transaction::new();
let size = self.embedder_coordinates.get_viewport();
transaction.set_document_view(size);
self.rendering_context.resize(size.size().to_untyped());
self.global
.webrender_api
.send_transaction(self.webrender_document, transaction);
}
// A size change could also mean a resolution change.
if self.embedder_coordinates.hidpi_factor == old_coords.hidpi_factor &&
self.embedder_coordinates.viewport == old_coords.viewport
{ {
return false; return false;
} }
self.rendering_context.resize(new_size);
let mut transaction = Transaction::new();
let output_region = DeviceIntRect::new(
Point2D::zero(),
Point2D::new(new_size.width as i32, new_size.height as i32),
);
transaction.set_document_view(output_region);
self.global
.webrender_api
.send_transaction(self.webrender_document, transaction);
self.update_after_zoom_or_hidpi_change(); self.update_after_zoom_or_hidpi_change();
self.set_needs_repaint(RepaintReason::Resize); self.set_needs_repaint(RepaintReason::Resize);
true true
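
To make the interleaved old/new lines above easier to follow, this is a
condensed restatement of the new resize path, using exactly the names that
appear in this hunk:

```rust
pub fn resize_rendering_context(&mut self, new_size: PhysicalSize<u32>) -> bool {
    if self.shutdown_state() != ShutdownState::NotShuttingDown {
        return false;
    }

    // A size change could also mean a resolution change.
    let old_hidpi_factor = self.embedder_coordinates.hidpi_factor;
    self.embedder_coordinates = self.window.get_coordinates();
    if self.embedder_coordinates.hidpi_factor == old_hidpi_factor &&
        self.rendering_context.size() == new_size
    {
        return false;
    }

    self.rendering_context.resize(new_size);

    // Keep WebRender's document view in sync with the resized surface.
    let mut transaction = Transaction::new();
    let output_region = DeviceIntRect::new(
        Point2D::zero(),
        Point2D::new(new_size.width as i32, new_size.height as i32),
    );
    transaction.set_document_view(output_region);
    self.global
        .webrender_api
        .send_transaction(self.webrender_document, transaction);

    self.update_after_zoom_or_hidpi_change();
    self.set_needs_repaint(RepaintReason::Resize);
    true
}
```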
@ -2085,28 +2088,25 @@ impl IOCompositor {
) -> Result<Option<Image>, UnableToComposite> { ) -> Result<Option<Image>, UnableToComposite> {
self.render_inner()?; self.render_inner()?;
let size = self.embedder_coordinates.framebuffer.to_u32(); let size = self.rendering_context.size2d().to_i32();
let (x, y, width, height) = if let Some(rect) = page_rect { let rect = if let Some(rect) = page_rect {
let rect = self.device_pixels_per_page_pixel().transform_rect(&rect); let rect = self.device_pixels_per_page_pixel().transform_rect(&rect);
let x = rect.origin.x as i32; let x = rect.origin.x as i32;
// We need to convert to the bottom-left origin coordinate // We need to convert to the bottom-left origin coordinate
// system used by OpenGL // system used by OpenGL
let y = (size.height as f32 - rect.origin.y - rect.size.height) as i32; let y = (size.height as f32 - rect.origin.y - rect.size.height) as i32;
let w = rect.size.width as u32; let w = rect.size.width as i32;
let h = rect.size.height as u32; let h = rect.size.height as i32;
(x, y, w, h) DeviceIntRect::from_origin_and_size(Point2D::new(x, y), Size2D::new(w, h))
} else { } else {
(0, 0, size.width, size.height) DeviceIntRect::from_origin_and_size(Point2D::origin(), size)
}; };
Ok(self Ok(self
.rendering_context .rendering_context
.read_to_image(Rect::new( .read_to_image(rect)
Point2D::new(x as u32, y as u32),
Size2D::new(width, height),
))
.map(|image| Image { .map(|image| Image {
width: image.width(), width: image.width(),
height: image.height(), height: image.height(),
@ -2155,13 +2155,11 @@ impl IOCompositor {
|| { || {
trace!("Compositing"); trace!("Compositing");
let size =
DeviceIntSize::from_untyped(self.embedder_coordinates.framebuffer.to_untyped());
// Paint the scene. // Paint the scene.
// TODO(gw): Take notice of any errors the renderer returns! // TODO(gw): Take notice of any errors the renderer returns!
self.clear_background(); self.clear_background();
if let Some(webrender) = self.webrender.as_mut() { if let Some(webrender) = self.webrender.as_mut() {
let size = self.rendering_context.size2d().to_i32();
webrender.render(size, 0 /* buffer_age */).ok(); webrender.render(size, 0 /* buffer_age */).ok();
} }
}, },

View file

@ -10,7 +10,7 @@ use embedder_traits::{EventLoopWaker, MouseButton};
use euclid::Scale; use euclid::Scale;
use net::protocols::ProtocolRegistry; use net::protocols::ProtocolRegistry;
use servo_geometry::{DeviceIndependentIntRect, DeviceIndependentIntSize, DeviceIndependentPixel}; use servo_geometry::{DeviceIndependentIntRect, DeviceIndependentIntSize, DeviceIndependentPixel};
use webrender_api::units::{DeviceIntRect, DeviceIntSize, DevicePixel, DevicePoint}; use webrender_api::units::{DevicePixel, DevicePoint};
#[derive(Clone)] #[derive(Clone)]
pub enum MouseWindowEvent { pub enum MouseWindowEvent {
@ -86,94 +86,4 @@ pub struct EmbedderCoordinates {
pub available_screen_size: DeviceIndependentIntSize, pub available_screen_size: DeviceIndependentIntSize,
/// Position and size of the native window. /// Position and size of the native window.
pub window_rect: DeviceIndependentIntRect, pub window_rect: DeviceIndependentIntRect,
/// Size of the GL buffer in the window.
pub framebuffer: DeviceIntSize,
/// Coordinates of the document within the framebuffer.
pub viewport: DeviceIntRect,
}
impl EmbedderCoordinates {
/// Get the unflipped viewport rectangle for use with the WebRender API.
pub fn get_viewport(&self) -> DeviceIntRect {
self.viewport
}
/// Flip the given rect.
/// This should be used when drawing directly to the framebuffer with OpenGL commands.
pub fn flip_rect(&self, rect: &DeviceIntRect) -> DeviceIntRect {
let mut result = *rect;
let min_y = self.framebuffer.height - result.max.y;
let max_y = self.framebuffer.height - result.min.y;
result.min.y = min_y;
result.max.y = max_y;
result
}
/// Get the flipped viewport rectangle.
/// This should be used when drawing directly to the framebuffer with OpenGL commands.
pub fn get_flipped_viewport(&self) -> DeviceIntRect {
self.flip_rect(&self.get_viewport())
}
}
#[cfg(test)]
mod test {
use euclid::{Box2D, Point2D, Scale, Size2D};
use webrender_api::units::DeviceIntRect;
use super::EmbedderCoordinates;
#[test]
fn test() {
let screen_size = Size2D::new(1080, 720);
let viewport = Box2D::from_origin_and_size(Point2D::zero(), Size2D::new(800, 600));
let window_rect = Box2D::from_origin_and_size(Point2D::zero(), Size2D::new(800, 600));
let coordinates = EmbedderCoordinates {
hidpi_factor: Scale::new(1.),
screen_size,
available_screen_size: screen_size,
window_rect,
framebuffer: viewport.size(),
viewport,
};
// Check if viewport conversion is correct.
let viewport = DeviceIntRect::new(Point2D::new(0, 0), Point2D::new(800, 600));
assert_eq!(coordinates.get_viewport(), viewport);
assert_eq!(coordinates.get_flipped_viewport(), viewport);
// Check rects with different y positions inside the viewport.
let rect1 = DeviceIntRect::new(Point2D::new(0, 0), Point2D::new(800, 400));
let rect2 = DeviceIntRect::new(Point2D::new(0, 100), Point2D::new(800, 600));
let rect3 = DeviceIntRect::new(Point2D::new(0, 200), Point2D::new(800, 500));
assert_eq!(
coordinates.flip_rect(&rect1),
DeviceIntRect::new(Point2D::new(0, 200), Point2D::new(800, 600))
);
assert_eq!(
coordinates.flip_rect(&rect2),
DeviceIntRect::new(Point2D::new(0, 0), Point2D::new(800, 500))
);
assert_eq!(
coordinates.flip_rect(&rect3),
DeviceIntRect::new(Point2D::new(0, 100), Point2D::new(800, 400))
);
// Check rects with different x positions.
let rect1 = DeviceIntRect::new(Point2D::new(0, 0), Point2D::new(700, 400));
let rect2 = DeviceIntRect::new(Point2D::new(100, 100), Point2D::new(800, 600));
let rect3 = DeviceIntRect::new(Point2D::new(300, 200), Point2D::new(600, 500));
assert_eq!(
coordinates.flip_rect(&rect1),
DeviceIntRect::new(Point2D::new(0, 200), Point2D::new(700, 600))
);
assert_eq!(
coordinates.flip_rect(&rect2),
DeviceIntRect::new(Point2D::new(100, 0), Point2D::new(800, 500))
);
assert_eq!(
coordinates.flip_rect(&rect3),
DeviceIntRect::new(Point2D::new(300, 100), Point2D::new(600, 400))
);
}
} }

View file

@ -70,6 +70,7 @@ constellation = { path = "../constellation" }
crossbeam-channel = { workspace = true } crossbeam-channel = { workspace = true }
devtools = { path = "../devtools" } devtools = { path = "../devtools" }
devtools_traits = { workspace = true } devtools_traits = { workspace = true }
dpi = { workspace = true }
embedder_traits = { workspace = true } embedder_traits = { workspace = true }
env_logger = { workspace = true } env_logger = { workspace = true }
euclid = { workspace = true } euclid = { workspace = true }

View file

@ -102,8 +102,7 @@ use webgpu::swapchain::WGPUImageMap;
use webrender::{RenderApiSender, ShaderPrecacheFlags, UploadMethod, ONE_TIME_USAGE_HINT}; use webrender::{RenderApiSender, ShaderPrecacheFlags, UploadMethod, ONE_TIME_USAGE_HINT};
use webrender_api::{ColorF, DocumentId, FramePublishId}; use webrender_api::{ColorF, DocumentId, FramePublishId};
pub use webrender_traits::rendering_context::{ pub use webrender_traits::rendering_context::{
OffscreenRenderingContext, RenderingContext, SoftwareRenderingContext, SurfmanRenderingContext, OffscreenRenderingContext, RenderingContext, SoftwareRenderingContext, WindowRenderingContext,
WindowRenderingContext,
}; };
use webrender_traits::{ use webrender_traits::{
CrossProcessCompositorApi, WebrenderExternalImageHandlers, WebrenderExternalImageRegistry, CrossProcessCompositorApi, WebrenderExternalImageHandlers, WebrenderExternalImageRegistry,
@ -341,7 +340,7 @@ impl Servo {
let coordinates: compositing::windowing::EmbedderCoordinates = window.get_coordinates(); let coordinates: compositing::windowing::EmbedderCoordinates = window.get_coordinates();
let device_pixel_ratio = coordinates.hidpi_factor.get(); let device_pixel_ratio = coordinates.hidpi_factor.get();
let viewport_size = coordinates.viewport.size().to_f32() / device_pixel_ratio; let viewport_size = rendering_context.size2d();
let (mut webrender, webrender_api_sender) = { let (mut webrender, webrender_api_sender) = {
let mut debug_flags = webrender::DebugFlags::empty(); let mut debug_flags = webrender::DebugFlags::empty();
@ -408,7 +407,7 @@ impl Servo {
}; };
let webrender_api = webrender_api_sender.create_api(); let webrender_api = webrender_api_sender.create_api();
let webrender_document = webrender_api.add_document(coordinates.get_viewport().size()); let webrender_document = webrender_api.add_document(viewport_size.to_i32());
// Important that this call is done in a single-threaded fashion, we // Important that this call is done in a single-threaded fashion, we
// can't defer it after `create_constellation` has started. // can't defer it after `create_constellation` has started.
@ -472,8 +471,9 @@ impl Servo {
// The division by 1 represents the page's default zoom of 100%, // The division by 1 represents the page's default zoom of 100%,
// and gives us the appropriate CSSPixel type for the viewport. // and gives us the appropriate CSSPixel type for the viewport.
let scaled_viewport_size = viewport_size.to_f32().to_untyped() / device_pixel_ratio;
let window_size = WindowSizeData { let window_size = WindowSizeData {
initial_viewport: viewport_size / Scale::new(1.0), initial_viewport: scaled_viewport_size / Scale::new(1.0),
device_pixel_ratio: Scale::new(device_pixel_ratio), device_pixel_ratio: Scale::new(device_pixel_ratio),
}; };

View file

@ -11,6 +11,7 @@ use base::id::WebViewId;
use compositing::windowing::WebRenderDebugOption; use compositing::windowing::WebRenderDebugOption;
use compositing::IOCompositor; use compositing::IOCompositor;
use compositing_traits::ConstellationMsg; use compositing_traits::ConstellationMsg;
use dpi::PhysicalSize;
use embedder_traits::{ use embedder_traits::{
Cursor, InputEvent, LoadStatus, MediaSessionActionType, Theme, TouchEventType, Cursor, InputEvent, LoadStatus, MediaSessionActionType, Theme, TouchEventType,
TraversalDirection, TraversalDirection,
@ -366,11 +367,11 @@ impl WebView {
self.inner().compositor.borrow_mut().on_vsync(); self.inner().compositor.borrow_mut().on_vsync();
} }
pub fn notify_rendering_context_resized(&self) { pub fn resize(&self, new_size: PhysicalSize<u32>) {
self.inner() self.inner()
.compositor .compositor
.borrow_mut() .borrow_mut()
.on_rendering_context_resized(); .resize_rendering_context(new_size);
} }
pub fn notify_embedder_window_moved(&self) { pub fn notify_embedder_window_moved(&self) {

View file

@ -10,8 +10,8 @@ use std::num::NonZeroU32;
use std::rc::Rc; use std::rc::Rc;
use dpi::PhysicalSize; use dpi::PhysicalSize;
use euclid::default::{Rect, Size2D}; use euclid::default::{Rect, Size2D as UntypedSize2D};
use euclid::Point2D; use euclid::{Point2D, Size2D};
use gleam::gl::{self, Gl}; use gleam::gl::{self, Gl};
use glow::NativeFramebuffer; use glow::NativeFramebuffer;
use image::RgbaImage; use image::RgbaImage;
@ -23,6 +23,7 @@ use surfman::{
Adapter, Connection, Context, ContextAttributeFlags, ContextAttributes, Device, GLApi, Adapter, Connection, Context, ContextAttributeFlags, ContextAttributes, Device, GLApi,
NativeContext, NativeWidget, Surface, SurfaceAccess, SurfaceInfo, SurfaceTexture, SurfaceType, NativeContext, NativeWidget, Surface, SurfaceAccess, SurfaceInfo, SurfaceTexture, SurfaceType,
}; };
use webrender_api::units::{DeviceIntRect, DevicePixel};
/// The `RenderingContext` trait defines a set of methods for managing /// The `RenderingContext` trait defines a set of methods for managing
/// an OpenGL or GLES rendering context. /// an OpenGL or GLES rendering context.
@ -40,9 +41,16 @@ pub trait RenderingContext {
/// In a double-buffered [`RenderingContext`] this is expected to read from the back /// In a double-buffered [`RenderingContext`] this is expected to read from the back
/// buffer. That means that once Servo renders to the context, this should return those /// buffer. That means that once Servo renders to the context, this should return those
/// results, even before [`RenderingContext::present`] is called. /// results, even before [`RenderingContext::present`] is called.
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage>; fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage>;
/// Get the current size of this [`RenderingContext`].
fn size(&self) -> PhysicalSize<u32>;
/// Get the current size of this [`RenderingContext`] as [`Size2D`].
fn size2d(&self) -> Size2D<u32, DevicePixel> {
let size = self.size();
Size2D::new(size.width, size.height)
}
/// Resizes the rendering surface to the given size. /// Resizes the rendering surface to the given size.
fn resize(&self, size: Size2D<i32>); fn resize(&self, size: PhysicalSize<u32>);
/// Presents the rendered frame to the screen. In a double-buffered context, this would /// Presents the rendered frame to the screen. In a double-buffered context, this would
/// swap buffers. /// swap buffers.
fn present(&self); fn present(&self);
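
Given the new signatures above, a full-surface read-back now looks roughly
like the following minimal sketch; the helper name is hypothetical, while the
trait methods and units are the ones introduced in this hunk (the trait is
re-exported from libservo as shown earlier in this diff):

```rust
use euclid::Point2D;
use image::RgbaImage;
use webrender_api::units::DeviceIntRect;
use webrender_traits::rendering_context::RenderingContext;

// Hypothetical helper: read back the entire rendering surface as an image.
fn read_full_frame(rendering_context: &dyn RenderingContext) -> Option<RgbaImage> {
    // `size2d()` is the new typed accessor over `size()`, and `read_to_image`
    // now takes a `DeviceIntRect` instead of an untyped `Rect<u32>`.
    let size = rendering_context.size2d().to_i32();
    let full_frame = DeviceIntRect::from_origin_and_size(Point2D::origin(), size);
    rendering_context.read_to_image(full_frame)
}
```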
@ -54,7 +62,10 @@ pub trait RenderingContext {
fn gl_api(&self) -> Rc<dyn gleam::gl::Gl>; fn gl_api(&self) -> Rc<dyn gleam::gl::Gl>;
/// Creates a texture from a given surface and returns the surface texture, /// Creates a texture from a given surface and returns the surface texture,
/// the OpenGL texture object, and the size of the surface. Default to `None`. /// the OpenGL texture object, and the size of the surface. Default to `None`.
fn create_texture(&self, _surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> { fn create_texture(
&self,
_surface: Surface,
) -> Option<(SurfaceTexture, u32, UntypedSize2D<i32>)> {
None None
} }
/// Destroys the texture and returns the surface. Default to `None`. /// Destroys the texture and returns the surface. Default to `None`.
@ -75,7 +86,7 @@ pub trait RenderingContext {
/// The `SurfmanRenderingContext` struct encapsulates the necessary data and methods /// The `SurfmanRenderingContext` struct encapsulates the necessary data and methods
/// to interact with the Surfman library, including creating surfaces, binding surfaces, /// to interact with the Surfman library, including creating surfaces, binding surfaces,
/// resizing surfaces, presenting rendered frames, and managing the OpenGL context state. /// resizing surfaces, presenting rendered frames, and managing the OpenGL context state.
pub struct SurfmanRenderingContext { struct SurfmanRenderingContext {
gl: Rc<dyn Gl>, gl: Rc<dyn Gl>,
device: RefCell<Device>, device: RefCell<Device>,
context: RefCell<Context>, context: RefCell<Context>,
@ -148,7 +159,8 @@ impl SurfmanRenderingContext {
SwapChain::create_attached(device, context, SurfaceAccess::GPUOnly) SwapChain::create_attached(device, context, SurfaceAccess::GPUOnly)
} }
fn resize_surface(&self, size: Size2D<i32>) -> Result<(), Error> { fn resize_surface(&self, size: PhysicalSize<u32>) -> Result<(), Error> {
let size = Size2D::new(size.width as i32, size.height as i32);
let device = &mut self.device.borrow_mut(); let device = &mut self.device.borrow_mut();
let context = &mut self.context.borrow_mut(); let context = &mut self.context.borrow_mut();
@ -191,12 +203,6 @@ impl SurfmanRenderingContext {
.unwrap_or(None) .unwrap_or(None)
.and_then(|info| info.framebuffer_object) .and_then(|info| info.framebuffer_object)
} }
}
impl RenderingContext for SurfmanRenderingContext {
fn gl_api(&self) -> Rc<dyn gleam::gl::Gl> {
self.gl.clone()
}
fn prepare_for_rendering(&self) { fn prepare_for_rendering(&self) {
let framebuffer_id = self let framebuffer_id = self
@ -206,32 +212,23 @@ impl RenderingContext for SurfmanRenderingContext {
.bind_framebuffer(gleam::gl::FRAMEBUFFER, framebuffer_id); .bind_framebuffer(gleam::gl::FRAMEBUFFER, framebuffer_id);
} }
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage> { fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage> {
let framebuffer_id = self let framebuffer_id = self
.framebuffer() .framebuffer()
.map_or(0, |framebuffer| framebuffer.0.into()); .map_or(0, |framebuffer| framebuffer.0.into());
Framebuffer::read_framebuffer_to_image(&self.gl, framebuffer_id, source_rectangle) Framebuffer::read_framebuffer_to_image(&self.gl, framebuffer_id, source_rectangle)
} }
fn resize(&self, size: Size2D<i32>) {
if let Err(error) = self.resize_surface(size) {
warn!("Error resizing surface: {error:?}");
}
}
fn present(&self) {
if let Err(error) = self.present_bound_surface() {
warn!("Error presenting surface: {error:?}");
}
}
fn make_current(&self) -> Result<(), Error> { fn make_current(&self) -> Result<(), Error> {
let device = &self.device.borrow(); let device = &self.device.borrow();
let context = &mut self.context.borrow(); let context = &mut self.context.borrow();
device.make_context_current(context) device.make_context_current(context)
} }
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> { fn create_texture(
&self,
surface: Surface,
) -> Option<(SurfaceTexture, u32, UntypedSize2D<i32>)> {
let device = &self.device.borrow(); let device = &self.device.borrow();
let context = &mut self.context.borrow_mut(); let context = &mut self.context.borrow_mut();
let SurfaceInfo { let SurfaceInfo {
@ -268,6 +265,7 @@ impl RenderingContext for SurfmanRenderingContext {
/// ///
/// The results of the render can be accessed via [`RenderingContext::read_to_image`]. /// The results of the render can be accessed via [`RenderingContext::read_to_image`].
pub struct SoftwareRenderingContext { pub struct SoftwareRenderingContext {
size: Cell<PhysicalSize<u32>>,
surfman_rendering_info: SurfmanRenderingContext, surfman_rendering_info: SurfmanRenderingContext,
swap_chain: SwapChain<Device>, swap_chain: SwapChain<Device>,
} }
@ -278,13 +276,15 @@ impl SoftwareRenderingContext {
let adapter = connection.create_software_adapter()?; let adapter = connection.create_software_adapter()?;
let surfman_rendering_info = SurfmanRenderingContext::new(&connection, &adapter)?; let surfman_rendering_info = SurfmanRenderingContext::new(&connection, &adapter)?;
let size = Size2D::new(size.width as i32, size.height as i32); let surfman_size = Size2D::new(size.width as i32, size.height as i32);
let surface = surfman_rendering_info.create_surface(SurfaceType::Generic { size })?; let surface =
surfman_rendering_info.create_surface(SurfaceType::Generic { size: surfman_size })?;
surfman_rendering_info.bind_surface(surface)?; surfman_rendering_info.bind_surface(surface)?;
surfman_rendering_info.make_current()?; surfman_rendering_info.make_current()?;
let swap_chain = surfman_rendering_info.create_attached_swap_chain()?; let swap_chain = surfman_rendering_info.create_attached_swap_chain()?;
Ok(SoftwareRenderingContext { Ok(SoftwareRenderingContext {
size: Cell::new(size),
surfman_rendering_info, surfman_rendering_info,
swap_chain, swap_chain,
}) })
@ -304,13 +304,24 @@ impl RenderingContext for SoftwareRenderingContext {
self.surfman_rendering_info.prepare_for_rendering(); self.surfman_rendering_info.prepare_for_rendering();
} }
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage> { fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage> {
self.surfman_rendering_info.read_to_image(source_rectangle) self.surfman_rendering_info.read_to_image(source_rectangle)
} }
fn resize(&self, size: Size2D<i32>) { fn size(&self) -> PhysicalSize<u32> {
self.size.get()
}
fn resize(&self, size: PhysicalSize<u32>) {
if self.size.get() == size {
return;
}
self.size.set(size);
let device = &mut self.surfman_rendering_info.device.borrow_mut(); let device = &mut self.surfman_rendering_info.device.borrow_mut();
let context = &mut self.surfman_rendering_info.context.borrow_mut(); let context = &mut self.surfman_rendering_info.context.borrow_mut();
let size = Size2D::new(size.width as i32, size.height as i32);
let _ = self.swap_chain.resize(device, context, size); let _ = self.swap_chain.resize(device, context, size);
} }
@ -331,7 +342,10 @@ impl RenderingContext for SoftwareRenderingContext {
self.surfman_rendering_info.gl.clone() self.surfman_rendering_info.gl.clone()
} }
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> { fn create_texture(
&self,
surface: Surface,
) -> Option<(SurfaceTexture, u32, UntypedSize2D<i32>)> {
self.surfman_rendering_info.create_texture(surface) self.surfman_rendering_info.create_texture(surface)
} }
@ -352,17 +366,20 @@ impl RenderingContext for SoftwareRenderingContext {
/// ///
/// If you would like to paint to only a portion of the window, consider using /// If you would like to paint to only a portion of the window, consider using
/// [`OffscreenRenderingContext`] by calling [`WindowRenderingContext::offscreen_context`]. /// [`OffscreenRenderingContext`] by calling [`WindowRenderingContext::offscreen_context`].
pub struct WindowRenderingContext(SurfmanRenderingContext); pub struct WindowRenderingContext {
size: Cell<PhysicalSize<u32>>,
surfman_context: SurfmanRenderingContext,
}
impl WindowRenderingContext { impl WindowRenderingContext {
pub fn new( pub fn new(
display_handle: DisplayHandle, display_handle: DisplayHandle,
window_handle: WindowHandle, window_handle: WindowHandle,
size: &PhysicalSize<u32>, size: PhysicalSize<u32>,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let connection = Connection::from_display_handle(display_handle)?; let connection = Connection::from_display_handle(display_handle)?;
let adapter = connection.create_adapter()?; let adapter = connection.create_adapter()?;
let surfman_rendering_info = SurfmanRenderingContext::new(&connection, &adapter)?; let surfman_context = SurfmanRenderingContext::new(&connection, &adapter)?;
let native_widget = connection let native_widget = connection
.create_native_widget_from_window_handle( .create_native_widget_from_window_handle(
@ -371,22 +388,27 @@ impl WindowRenderingContext {
) )
.expect("Failed to create native widget"); .expect("Failed to create native widget");
let surface = let surface = surfman_context.create_surface(SurfaceType::Widget { native_widget })?;
surfman_rendering_info.create_surface(SurfaceType::Widget { native_widget })?; surfman_context.bind_surface(surface)?;
surfman_rendering_info.bind_surface(surface)?; surfman_context.make_current()?;
surfman_rendering_info.make_current()?;
Ok(Self(surfman_rendering_info)) Ok(Self {
size: Cell::new(size),
surfman_context,
})
} }
pub fn offscreen_context(self: &Rc<Self>, size: Size2D<u32>) -> OffscreenRenderingContext { pub fn offscreen_context(
self: &Rc<Self>,
size: PhysicalSize<u32>,
) -> OffscreenRenderingContext {
OffscreenRenderingContext::new(self.clone(), size) OffscreenRenderingContext::new(self.clone(), size)
} }
/// TODO: This can be removed when Servo switches fully to `glow.` /// TODO: This can be removed when Servo switches fully to `glow.`
pub fn get_proc_address(&self, name: &str) -> *const c_void { pub fn get_proc_address(&self, name: &str) -> *const c_void {
let device = &self.0.device.borrow(); let device = &self.surfman_context.device.borrow();
let context = &self.0.context.borrow(); let context = &self.surfman_context.context.borrow();
device.get_proc_address(context, name) device.get_proc_address(context, name)
} }
@ -395,8 +417,8 @@ impl WindowRenderingContext {
/// ///
/// TODO: This should be removed once `WebView`s can replace their `RenderingContext`s. /// TODO: This should be removed once `WebView`s can replace their `RenderingContext`s.
pub fn take_window(&self) -> Result<(), Error> { pub fn take_window(&self) -> Result<(), Error> {
let device = self.0.device.borrow_mut(); let device = self.surfman_context.device.borrow_mut();
let mut context = self.0.context.borrow_mut(); let mut context = self.surfman_context.context.borrow_mut();
let mut surface = device.unbind_surface_from_context(&mut context)?.unwrap(); let mut surface = device.unbind_surface_from_context(&mut context)?.unwrap();
device.destroy_surface(&mut context, &mut surface)?; device.destroy_surface(&mut context, &mut surface)?;
Ok(()) Ok(())
@ -409,10 +431,10 @@ impl WindowRenderingContext {
pub fn set_window( pub fn set_window(
&self, &self,
window_handle: WindowHandle, window_handle: WindowHandle,
size: &PhysicalSize<u32>, size: PhysicalSize<u32>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let mut device = self.0.device.borrow_mut(); let mut device = self.surfman_context.device.borrow_mut();
let mut context = self.0.context.borrow_mut(); let mut context = self.surfman_context.context.borrow_mut();
let native_widget = device let native_widget = device
.connection() .connection()
@ -437,56 +459,67 @@ impl WindowRenderingContext {
} }
pub fn surfman_details(&self) -> (RefMut<Device>, RefMut<Context>) { pub fn surfman_details(&self) -> (RefMut<Device>, RefMut<Context>) {
(self.0.device.borrow_mut(), self.0.context.borrow_mut()) (
self.surfman_context.device.borrow_mut(),
self.surfman_context.context.borrow_mut(),
)
} }
} }
impl RenderingContext for WindowRenderingContext { impl RenderingContext for WindowRenderingContext {
fn prepare_for_rendering(&self) { fn prepare_for_rendering(&self) {
self.0.prepare_for_rendering(); self.surfman_context.prepare_for_rendering();
} }
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage> { fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage> {
self.0.read_to_image(source_rectangle) self.surfman_context.read_to_image(source_rectangle)
} }
fn resize(&self, size: Size2D<i32>) { fn size(&self) -> PhysicalSize<u32> {
if let Err(error) = self.0.resize_surface(size) { self.size.get()
warn!("Error resizing surface: {error:?}"); }
fn resize(&self, size: PhysicalSize<u32>) {
match self.surfman_context.resize_surface(size) {
Ok(..) => self.size.set(size),
Err(error) => warn!("Error resizing surface: {error:?}"),
} }
} }
fn present(&self) { fn present(&self) {
if let Err(error) = self.0.present_bound_surface() { if let Err(error) = self.surfman_context.present_bound_surface() {
warn!("Error presenting surface: {error:?}"); warn!("Error presenting surface: {error:?}");
} }
} }
fn make_current(&self) -> Result<(), Error> { fn make_current(&self) -> Result<(), Error> {
self.0.make_current() self.surfman_context.make_current()
} }
#[allow(unsafe_code)] #[allow(unsafe_code)]
fn gl_api(&self) -> Rc<dyn gleam::gl::Gl> { fn gl_api(&self) -> Rc<dyn gleam::gl::Gl> {
self.0.gl.clone() self.surfman_context.gl.clone()
} }
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> { fn create_texture(
self.0.create_texture(surface) &self,
surface: Surface,
) -> Option<(SurfaceTexture, u32, UntypedSize2D<i32>)> {
self.surfman_context.create_texture(surface)
} }
fn destroy_texture(&self, surface_texture: SurfaceTexture) -> Option<Surface> { fn destroy_texture(&self, surface_texture: SurfaceTexture) -> Option<Surface> {
self.0.destroy_texture(surface_texture) self.surfman_context.destroy_texture(surface_texture)
} }
fn connection(&self) -> Option<Connection> { fn connection(&self) -> Option<Connection> {
self.0.connection() self.surfman_context.connection()
} }
} }
struct Framebuffer { struct Framebuffer {
gl: Rc<dyn Gl>, gl: Rc<dyn Gl>,
size: Size2D<u32>, size: PhysicalSize<u32>,
framebuffer_id: gl::GLuint, framebuffer_id: gl::GLuint,
renderbuffer_id: gl::GLuint, renderbuffer_id: gl::GLuint,
texture_id: gl::GLuint, texture_id: gl::GLuint,
@ -510,7 +543,7 @@ impl Drop for Framebuffer {
} }
impl Framebuffer { impl Framebuffer {
fn new(gl: Rc<dyn Gl>, size: Size2D<u32>) -> Self { fn new(gl: Rc<dyn Gl>, size: PhysicalSize<u32>) -> Self {
let framebuffer_ids = gl.gen_framebuffers(1); let framebuffer_ids = gl.gen_framebuffers(1);
gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_ids[0]); gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_ids[0]);
@ -577,14 +610,14 @@ impl Framebuffer {
} }
} }
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage> { fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage> {
Self::read_framebuffer_to_image(&self.gl, self.framebuffer_id, source_rectangle) Self::read_framebuffer_to_image(&self.gl, self.framebuffer_id, source_rectangle)
} }
fn read_framebuffer_to_image( fn read_framebuffer_to_image(
gl: &Rc<dyn Gl>, gl: &Rc<dyn Gl>,
framebuffer_id: u32, framebuffer_id: u32,
source_rectangle: Rect<u32>, source_rectangle: DeviceIntRect,
) -> Option<RgbaImage> { ) -> Option<RgbaImage> {
gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_id); gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_id);
@ -597,10 +630,10 @@ impl Framebuffer {
gl.bind_vertex_array(0); gl.bind_vertex_array(0);
let mut pixels = gl.read_pixels( let mut pixels = gl.read_pixels(
source_rectangle.origin.x as i32, source_rectangle.min.x,
source_rectangle.origin.y as i32, source_rectangle.min.y,
source_rectangle.width() as gl::GLsizei, source_rectangle.width(),
source_rectangle.height() as gl::GLsizei, source_rectangle.height(),
gl::RGBA, gl::RGBA,
gl::UNSIGNED_BYTE, gl::UNSIGNED_BYTE,
); );
@ -630,7 +663,7 @@ impl Framebuffer {
pub struct OffscreenRenderingContext { pub struct OffscreenRenderingContext {
parent_context: Rc<WindowRenderingContext>, parent_context: Rc<WindowRenderingContext>,
size: Cell<Size2D<u32>>, size: Cell<PhysicalSize<u32>>,
back_framebuffer: RefCell<Framebuffer>, back_framebuffer: RefCell<Framebuffer>,
front_framebuffer: RefCell<Option<Framebuffer>>, front_framebuffer: RefCell<Option<Framebuffer>>,
} }
@ -638,7 +671,7 @@ pub struct OffscreenRenderingContext {
type RenderToParentCallback = Box<dyn Fn(&glow::Context, Rect<i32>) + Send + Sync>; type RenderToParentCallback = Box<dyn Fn(&glow::Context, Rect<i32>) + Send + Sync>;
impl OffscreenRenderingContext { impl OffscreenRenderingContext {
fn new(parent_context: Rc<WindowRenderingContext>, size: Size2D<u32>) -> Self { fn new(parent_context: Rc<WindowRenderingContext>, size: PhysicalSize<u32>) -> Self {
let next_framebuffer = Framebuffer::new(parent_context.gl_api(), size); let next_framebuffer = Framebuffer::new(parent_context.gl_api(), size);
Self { Self {
parent_context, parent_context,
@ -663,8 +696,9 @@ impl OffscreenRenderingContext {
// Don't accept a `None` context for the read framebuffer. // Don't accept a `None` context for the read framebuffer.
let front_framebuffer_id = let front_framebuffer_id =
NonZeroU32::new(self.front_framebuffer_id()?).map(NativeFramebuffer)?; NonZeroU32::new(self.front_framebuffer_id()?).map(NativeFramebuffer)?;
let parent_context_framebuffer_id = self.parent_context.0.framebuffer(); let parent_context_framebuffer_id = self.parent_context.surfman_context.framebuffer();
let size = self.size.get(); let size = self.size.get();
let size = Size2D::new(size.width as i32, size.height as i32);
Some(Box::new(move |gl, target_rect| { Some(Box::new(move |gl, target_rect| {
Self::render_framebuffer_to_parent_context( Self::render_framebuffer_to_parent_context(
gl, gl,
@ -718,11 +752,15 @@ impl OffscreenRenderingContext {
} }
impl RenderingContext for OffscreenRenderingContext { impl RenderingContext for OffscreenRenderingContext {
fn resize(&self, size: Size2D<i32>) { fn size(&self) -> PhysicalSize<u32> {
self.size.get()
}
fn resize(&self, size: PhysicalSize<u32>) {
// We do not resize any buffers right now. The current buffers might be too big or too // We do not resize any buffers right now. The current buffers might be too big or too
// small, but we only want to ensure (later) that next buffer that we draw to is the // small, but we only want to ensure (later) that next buffer that we draw to is the
// correct size. // correct size.
self.size.set(size.to_u32()); self.size.set(size);
} }
fn prepare_for_rendering(&self) { fn prepare_for_rendering(&self) {
@ -756,7 +794,10 @@ impl RenderingContext for OffscreenRenderingContext {
self.parent_context.gl_api() self.parent_context.gl_api()
} }
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> { fn create_texture(
&self,
surface: Surface,
) -> Option<(SurfaceTexture, u32, UntypedSize2D<i32>)> {
self.parent_context.create_texture(surface) self.parent_context.create_texture(surface)
} }
@ -768,7 +809,7 @@ impl RenderingContext for OffscreenRenderingContext {
self.parent_context.connection() self.parent_context.connection()
} }
fn read_to_image(&self, source_rectangle: Rect<u32>) -> Option<RgbaImage> { fn read_to_image(&self, source_rectangle: DeviceIntRect) -> Option<RgbaImage> {
self.back_framebuffer self.back_framebuffer
.borrow() .borrow()
.read_to_image(source_rectangle) .read_to_image(source_rectangle)
@ -777,7 +818,8 @@ impl RenderingContext for OffscreenRenderingContext {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use euclid::{Point2D, Rect, Size2D}; use dpi::PhysicalSize;
use euclid::{Box2D, Point2D, Size2D};
use gleam::gl; use gleam::gl;
use image::Rgba; use image::Rgba;
use surfman::{Connection, ContextAttributeFlags, ContextAttributes, Error, GLApi, GLVersion}; use surfman::{Connection, ContextAttributeFlags, ContextAttributes, Error, GLApi, GLVersion};
@ -807,15 +849,16 @@ mod test {
{ {
const SIZE: u32 = 16; const SIZE: u32 = 16;
let framebuffer = Framebuffer::new(gl, Size2D::new(SIZE, SIZE)); let framebuffer = Framebuffer::new(gl, PhysicalSize::new(SIZE, SIZE));
framebuffer.bind(); framebuffer.bind();
framebuffer framebuffer
.gl .gl
.clear_color(12.0 / 255.0, 34.0 / 255.0, 56.0 / 255.0, 78.0 / 255.0); .clear_color(12.0 / 255.0, 34.0 / 255.0, 56.0 / 255.0, 78.0 / 255.0);
framebuffer.gl.clear(gl::COLOR_BUFFER_BIT); framebuffer.gl.clear(gl::COLOR_BUFFER_BIT);
let rect = Box2D::from_origin_and_size(Point2D::zero(), Size2D::new(SIZE, SIZE));
let img = framebuffer let img = framebuffer
.read_to_image(Rect::new(Point2D::zero(), Size2D::new(SIZE, SIZE))) .read_to_image(rect.to_i32())
.expect("Should have been able to read back image."); .expect("Should have been able to read back image.");
assert_eq!(img.width(), SIZE); assert_eq!(img.width(), SIZE);
assert_eq!(img.height(), SIZE); assert_eq!(img.height(), SIZE);

View file

@ -7,14 +7,14 @@ use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
use euclid::Vector2D; use euclid::{Point2D, Vector2D};
use image::{DynamicImage, ImageFormat}; use image::{DynamicImage, ImageFormat};
use keyboard_types::{Key, KeyboardEvent, Modifiers, ShortcutMatcher}; use keyboard_types::{Key, KeyboardEvent, Modifiers, ShortcutMatcher};
use log::{error, info}; use log::{error, info};
use servo::base::id::WebViewId; use servo::base::id::WebViewId;
use servo::config::pref; use servo::config::pref;
use servo::ipc_channel::ipc::IpcSender; use servo::ipc_channel::ipc::IpcSender;
use servo::webrender_api::units::{DeviceIntPoint, DeviceIntSize}; use servo::webrender_api::units::{DeviceIntPoint, DeviceIntRect, DeviceIntSize};
use servo::webrender_api::ScrollLocation; use servo::webrender_api::ScrollLocation;
use servo::{ use servo::{
AllowOrDenyRequest, AuthenticationRequest, FilterPattern, GamepadHapticEffectType, LoadStatus, AllowOrDenyRequest, AuthenticationRequest, FilterPattern, GamepadHapticEffectType, LoadStatus,
@ -130,13 +130,8 @@ impl RunningAppState {
}; };
let inner = self.inner(); let inner = self.inner();
let viewport_rect = inner let size = inner.window.rendering_context().size2d().to_i32();
.window let viewport_rect = DeviceIntRect::from_origin_and_size(Point2D::origin(), size);
.get_coordinates()
.viewport
.to_rect()
.to_untyped()
.to_u32();
let Some(image) = inner let Some(image) = inner
.window .window
.rendering_context() .rendering_context()
@ -467,17 +462,8 @@ impl WebViewDelegate for RunningAppState {
} }
fn notify_ready_to_show(&self, webview: servo::WebView) { fn notify_ready_to_show(&self, webview: servo::WebView) {
let rect = self
.inner()
.window
.get_coordinates()
.get_viewport()
.to_f32();
webview.focus(); webview.focus();
webview.move_resize(rect);
webview.raise_to_top(true); webview.raise_to_top(true);
webview.notify_rendering_context_resized();
} }
fn notify_closed(&self, webview: servo::WebView) { fn notify_closed(&self, webview: servo::WebView) {

View file

@ -121,7 +121,7 @@ impl Window {
.window_handle() .window_handle()
.expect("could not get window handle from window"); .expect("could not get window handle from window");
let window_rendering_context = Rc::new( let window_rendering_context = Rc::new(
WindowRenderingContext::new(display_handle, window_handle, &inner_size) WindowRenderingContext::new(display_handle, window_handle, inner_size)
.expect("Could not create RenderingContext for Window"), .expect("Could not create RenderingContext for Window"),
); );
@ -135,9 +135,7 @@ impl Window {
// Make sure the gl context is made current. // Make sure the gl context is made current.
window_rendering_context.make_current().unwrap(); window_rendering_context.make_current().unwrap();
let rendering_context_size = Size2D::new(inner_size.width, inner_size.height); let rendering_context = Rc::new(window_rendering_context.offscreen_context(inner_size));
let rendering_context =
Rc::new(window_rendering_context.offscreen_context(rendering_context_size));
debug!("Created window {:?}", winit_window.id()); debug!("Created window {:?}", winit_window.id());
Window { Window {
@ -614,11 +612,8 @@ impl WindowPortsMethods for Window {
}, },
WindowEvent::Resized(new_size) => { WindowEvent::Resized(new_size) => {
if self.inner_size.get() != new_size { if self.inner_size.get() != new_size {
let rendering_context_size = Size2D::new(new_size.width, new_size.height); self.window_rendering_context.resize(new_size);
self.window_rendering_context
.resize(rendering_context_size.to_i32());
self.inner_size.set(new_size); self.inner_size.set(new_size);
webview.notify_rendering_context_resized();
} }
}, },
WindowEvent::ThemeChanged(theme) => { WindowEvent::ThemeChanged(theme) => {
@ -736,17 +731,9 @@ impl WindowMethods for Window {
let window_scale: Scale<f64, DeviceIndependentPixel, DevicePixel> = let window_scale: Scale<f64, DeviceIndependentPixel, DevicePixel> =
Scale::new(self.winit_window.scale_factor()); Scale::new(self.winit_window.scale_factor());
let window_rect = (window_rect.to_f64() / window_scale).to_i32(); let window_rect = (window_rect.to_f64() / window_scale).to_i32();
let viewport_origin = DeviceIntPoint::zero(); // bottom left
let mut viewport_size = winit_size_to_euclid_size(self.winit_window.inner_size()).to_f32();
viewport_size.height -= (self.toolbar_height() * self.hidpi_factor()).0;
let viewport = DeviceIntRect::from_origin_and_size(viewport_origin, viewport_size.to_i32());
let screen_size = self.screen_size.to_i32(); let screen_size = self.screen_size.to_i32();
EmbedderCoordinates { EmbedderCoordinates {
viewport,
framebuffer: viewport.size(),
window_rect, window_rect,
screen_size, screen_size,
// FIXME: Winit doesn't have API for available size. Fallback to screen size // FIXME: Winit doesn't have API for available size. Fallback to screen size

View file

@ -86,7 +86,7 @@ impl WindowPortsMethods for Window {
// Because we are managing the rendering surface ourselves, there will be no other // Because we are managing the rendering surface ourselves, there will be no other
// notification (such as from the display manager) that it has changed size, so we // notification (such as from the display manager) that it has changed size, so we
// must notify the compositor here. // must notify the compositor here.
webview.notify_rendering_context_resized(); webview.resize(PhysicalSize::new(size.width as u32, size.height as u32));
Some(new_size) Some(new_size)
} }
@ -149,10 +149,7 @@ impl WindowPortsMethods for Window {
impl WindowMethods for Window { impl WindowMethods for Window {
fn get_coordinates(&self) -> EmbedderCoordinates { fn get_coordinates(&self) -> EmbedderCoordinates {
let inner_size = self.inner_size.get();
EmbedderCoordinates { EmbedderCoordinates {
viewport: Box2D::from_origin_and_size(Point2D::zero(), inner_size),
framebuffer: inner_size,
window_rect: self.window_rect, window_rect: self.window_rect,
screen_size: self.screen_size, screen_size: self.screen_size,
available_screen_size: self.screen_size, available_screen_size: self.screen_size,

View file

@ -7,6 +7,7 @@ use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use std::time::Instant; use std::time::Instant;
use dpi::PhysicalSize;
use egui::text::{CCursor, CCursorRange}; use egui::text::{CCursor, CCursorRange};
use egui::text_edit::TextEditState; use egui::text_edit::TextEditState;
use egui::{ use egui::{
@ -379,13 +380,11 @@ impl Minibrowser {
// If the top parts of the GUI changed size, then update the size of the WebView and also // If the top parts of the GUI changed size, then update the size of the WebView and also
// the size of its RenderingContext. // the size of its RenderingContext.
let available_size = ui.available_size(); let available_size = ui.available_size();
let rect = Box2D::from_origin_and_size( let size = Size2D::new(available_size.x, available_size.y) * scale;
Point2D::origin(), let rect = Box2D::from_origin_and_size(Point2D::origin(), size);
Size2D::new(available_size.x, available_size.y),
) * scale;
if rect != webview.rect() { if rect != webview.rect() {
webview.move_resize(rect); webview.move_resize(rect);
rendering_context.resize(rect.size().to_i32().to_untyped()); webview.resize(PhysicalSize::new(size.width as u32, size.height as u32))
} }
let min = ui.cursor().min; let min = ui.cursor().min;
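
Condensed from the interleaved hunk above, the Minibrowser's toolbar-resize
path now exercises both APIs described in the commit message, the transitional
`move_resize` and the new `resize`:

```rust
// Names and types exactly as they appear in the hunk above.
let size = Size2D::new(available_size.x, available_size.y) * scale;
let rect = Box2D::from_origin_and_size(Point2D::origin(), size);
if rect != webview.rect() {
    // Transitional API: position the virtual WebView within the context.
    webview.move_resize(rect);
    // New API: resize the WebView and its offscreen RenderingContext.
    webview.resize(PhysicalSize::new(size.width as u32, size.height as u32))
}
```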

View file

@ -713,13 +713,7 @@ fn jni_coords_to_rust_coords<'local>(
let height = get_non_null_field(env, obj, "height", "I")? let height = get_non_null_field(env, obj, "height", "I")?
.i() .i()
.map_err(|_| "height not an int")? as i32; .map_err(|_| "height not an int")? as i32;
let fb_width = get_non_null_field(env, obj, "fb_width", "I")? Ok(Coordinates::new(x, y, width, height))
.i()
.map_err(|_| "fb_width not an int")? as i32;
let fb_height = get_non_null_field(env, obj, "fb_height", "I")?
.i()
.map_err(|_| "fb_height not an int")? as i32;
Ok(Coordinates::new(x, y, width, height, fb_width, fb_height))
} }
fn get_field<'local>( fn get_field<'local>(

View file

@ -6,6 +6,7 @@ use std::cell::RefCell;
use std::mem; use std::mem;
use std::rc::Rc; use std::rc::Rc;
use dpi::PhysicalSize;
use raw_window_handle::{DisplayHandle, RawDisplayHandle, RawWindowHandle, WindowHandle}; use raw_window_handle::{DisplayHandle, RawDisplayHandle, RawWindowHandle, WindowHandle};
pub use servo::webrender_api::units::DeviceIntRect; pub use servo::webrender_api::units::DeviceIntRect;
/// The EventLoopWaker::wake function will be called from any thread. /// The EventLoopWaker::wake function will be called from any thread.
@ -68,11 +69,13 @@ pub fn init(
WindowHandle::borrow_raw(init_opts.window_handle), WindowHandle::borrow_raw(init_opts.window_handle),
) )
}; };
let size = init_opts.coordinates.viewport.size;
let rendering_context = Rc::new( let rendering_context = Rc::new(
WindowRenderingContext::new( WindowRenderingContext::new(
display_handle, display_handle,
window_handle, window_handle,
&init_opts.coordinates.framebuffer_size(), PhysicalSize::new(size.width as u32, size.height as u32),
) )
.expect("Could not create RenderingContext"), .expect("Could not create RenderingContext"),
); );

View file

@ -16,7 +16,7 @@ use servo::compositing::windowing::{
}; };
use servo::euclid::{Box2D, Point2D, Rect, Scale, Size2D, Vector2D}; use servo::euclid::{Box2D, Point2D, Rect, Scale, Size2D, Vector2D};
use servo::servo_geometry::DeviceIndependentPixel; use servo::servo_geometry::DeviceIndependentPixel;
use servo::webrender_api::units::{DeviceIntRect, DeviceIntSize, DevicePixel, DeviceRect}; use servo::webrender_api::units::{DeviceIntRect, DeviceIntSize, DevicePixel};
use servo::webrender_api::ScrollLocation; use servo::webrender_api::ScrollLocation;
use servo::{ use servo::{
AllowOrDenyRequest, ContextMenuResult, EmbedderProxy, EventLoopWaker, ImeEvent, InputEvent, AllowOrDenyRequest, ContextMenuResult, EmbedderProxy, EventLoopWaker, ImeEvent, InputEvent,
@ -34,30 +34,14 @@ use crate::prefs::ServoShellPreferences;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Coordinates { pub struct Coordinates {
pub viewport: Rect<i32, DevicePixel>, pub viewport: Rect<i32, DevicePixel>,
pub framebuffer: Size2D<i32, DevicePixel>,
} }
impl Coordinates { impl Coordinates {
pub fn new( pub fn new(x: i32, y: i32, width: i32, height: i32) -> Coordinates {
x: i32,
y: i32,
width: i32,
height: i32,
fb_width: i32,
fb_height: i32,
) -> Coordinates {
Coordinates { Coordinates {
viewport: Rect::new(Point2D::new(x, y), Size2D::new(width, height)), viewport: Rect::new(Point2D::new(x, y), Size2D::new(width, height)),
framebuffer: Size2D::new(fb_width, fb_height),
} }
} }
pub(crate) fn framebuffer_size(&self) -> PhysicalSize<u32> {
PhysicalSize::new(
self.framebuffer.width as u32,
self.framebuffer.height as u32,
)
}
} }
pub(super) struct ServoWindowCallbacks { pub(super) struct ServoWindowCallbacks {
@ -426,14 +410,12 @@ impl RunningAppState {
/// Let Servo know that the window has been resized. /// Let Servo know that the window has been resized.
pub fn resize(&self, coordinates: Coordinates) { pub fn resize(&self, coordinates: Coordinates) {
info!("resize to {:?}", coordinates); info!("resize to {:?}", coordinates,);
let size = coordinates.viewport.size; self.active_webview().resize(PhysicalSize::new(
self.rendering_context coordinates.viewport.width() as u32,
.resize(Size2D::new(size.width, size.height)); coordinates.viewport.height() as u32,
));
*self.callbacks.coordinates.borrow_mut() = coordinates; *self.callbacks.coordinates.borrow_mut() = coordinates;
self.active_webview().notify_rendering_context_resized();
self.active_webview()
.move_resize(DeviceRect::from_size(size.to_f32()));
self.perform_updates(); self.perform_updates();
} }
@ -633,9 +615,10 @@ impl RunningAppState {
pub fn resume_compositor(&self, window_handle: RawWindowHandle, coords: Coordinates) { pub fn resume_compositor(&self, window_handle: RawWindowHandle, coords: Coordinates) {
let window_handle = unsafe { WindowHandle::borrow_raw(window_handle) }; let window_handle = unsafe { WindowHandle::borrow_raw(window_handle) };
let size = coords.viewport.size.to_u32();
if let Err(e) = self if let Err(e) = self
.rendering_context .rendering_context
.set_window(window_handle, &coords.framebuffer_size()) .set_window(window_handle, PhysicalSize::new(size.width, size.height))
{ {
warn!("Binding native surface to context failed ({:?})", e); warn!("Binding native surface to context failed ({:?})", e);
} }
@ -723,8 +706,6 @@ impl WindowMethods for ServoWindowCallbacks {
let coords = self.coordinates.borrow(); let coords = self.coordinates.borrow();
let screen_size = (coords.viewport.size.to_f32() / self.hidpi_factor).to_i32(); let screen_size = (coords.viewport.size.to_f32() / self.hidpi_factor).to_i32();
EmbedderCoordinates { EmbedderCoordinates {
viewport: coords.viewport.to_box2d(),
framebuffer: coords.framebuffer,
window_rect: Box2D::from_origin_and_size(Point2D::zero(), screen_size), window_rect: Box2D::from_origin_and_size(Point2D::zero(), screen_size),
screen_size, screen_size,
available_screen_size: screen_size, available_screen_size: screen_size,

View file

@ -184,9 +184,7 @@ impl ServoAction {
servo.notify_vsync(); servo.notify_vsync();
servo.present_if_needed(); servo.present_if_needed();
}, },
Resize { width, height } => { Resize { width, height } => servo.resize(Coordinates::new(0, 0, *width, *height)),
servo.resize(Coordinates::new(0, 0, *width, *height, *width, *height))
},
}; };
} }
} }

View file

@ -7,6 +7,7 @@ use std::path::PathBuf;
use std::ptr::NonNull; use std::ptr::NonNull;
use std::rc::Rc; use std::rc::Rc;
use dpi::PhysicalSize;
use log::{debug, info}; use log::{debug, info};
use raw_window_handle::{ use raw_window_handle::{
DisplayHandle, OhosDisplayHandle, OhosNdkWindowHandle, RawDisplayHandle, RawWindowHandle, DisplayHandle, OhosDisplayHandle, OhosNdkWindowHandle, RawDisplayHandle, RawWindowHandle,
@ -66,14 +67,7 @@ pub fn init(
let Ok(window_size) = (unsafe { super::get_xcomponent_size(xcomponent, native_window) }) else { let Ok(window_size) = (unsafe { super::get_xcomponent_size(xcomponent, native_window) }) else {
return Err("Failed to get xcomponent size"); return Err("Failed to get xcomponent size");
}; };
let coordinates = Coordinates::new( let coordinates = Coordinates::new(0, 0, window_size.width, window_size.height);
0,
0,
window_size.width,
window_size.height,
window_size.width,
window_size.height,
);
let display_handle = RawDisplayHandle::Ohos(OhosDisplayHandle::new()); let display_handle = RawDisplayHandle::Ohos(OhosDisplayHandle::new());
let display_handle = unsafe { DisplayHandle::borrow_raw(display_handle) }; let display_handle = unsafe { DisplayHandle::borrow_raw(display_handle) };
@ -86,7 +80,7 @@ pub fn init(
WindowRenderingContext::new( WindowRenderingContext::new(
display_handle, display_handle,
window_handle, window_handle,
&coordinates.framebuffer_size(), PhysicalSize::new(window_size.width as u32, window_size.height as u32),
) )
.expect("Could not create RenderingContext"), .expect("Could not create RenderingContext"),
); );

View file

@ -85,8 +85,6 @@ public class JNIServo {
public int y = 0; public int y = 0;
public int width = 0; public int width = 0;
public int height = 0; public int height = 0;
public int fb_width = 0;
public int fb_height = 0;
} }
public interface Callbacks { public interface Callbacks {

View file

@ -372,8 +372,6 @@ public class ServoView extends SurfaceView
ServoCoordinates coords = new ServoCoordinates(); ServoCoordinates coords = new ServoCoordinates();
coords.width = mServoView.getWidth(); coords.width = mServoView.getWidth();
coords.height = mServoView.getHeight(); coords.height = mServoView.getHeight();
coords.fb_width = mServoView.getWidth();
coords.fb_height = mServoView.getHeight();
Surface surface = holder.getSurface(); Surface surface = holder.getSurface();
ServoOptions options = new ServoOptions(); ServoOptions options = new ServoOptions();
@ -401,9 +399,6 @@ public class ServoView extends SurfaceView
ServoCoordinates coords = new ServoCoordinates(); ServoCoordinates coords = new ServoCoordinates();
coords.width = width; coords.width = width;
coords.height = height; coords.height = height;
coords.fb_width = width;
coords.fb_height = height;
mServoView.mServo.resize(coords); mServoView.mServo.resize(coords);
} }