Mirror of https://github.com/servo/servo.git
Synced 2025-09-04 03:58:23 +01:00
compositor: Allow canvas to upload rendered contents asynchronously (#37776)
Adds an epoch to each WR image op command that is sent to the compositor. The renderer now has a `FrameDelayer` data structure that is responsible for tracking when a frame is ready to be displayed. When canvases are asked to update their rendering, they are given an optional `Epoch` which denotes the `Document`'s canvas epoch. When all image updates for that `Epoch` have been seen in the renderer, the frame can be displayed.

Testing: Existing WPT tests
Fixes: #35733

Signed-off-by: sagudev <16504129+sagudev@users.noreply.github.com>
Signed-off-by: Martin Robinson <mrobinson@igalia.com>
Co-authored-by: Martin Robinson <mrobinson@igalia.com>
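For orientation, here is a minimal sketch of the epoch-gating idea the message describes. This is not Servo's actual code: `FrameGate`, its method names, and the plain integer `ImageKey`/`Epoch` aliases are simplified stand-ins for the real `FrameDelayer` added to the compositor in this commit. A requested frame is only considered ready once every image key registered for a given epoch has been seen at that epoch or newer.

```rust
use std::collections::HashMap;

// Simplified stand-ins for WebRender's ImageKey and Servo's Epoch.
type ImageKey = u64;
type Epoch = u32;

/// Sketch of an epoch-gated frame delayer: a requested frame is only
/// "ready" once every canvas image it depends on has been updated at
/// (or past) the epoch recorded for it.
#[derive(Default)]
struct FrameGate {
    /// Latest epoch seen for each image key.
    seen: HashMap<ImageKey, Epoch>,
    /// Image keys (and the epoch they must reach) that a pending frame waits on.
    pending: HashMap<ImageKey, Epoch>,
    /// Whether a frame has been requested at all.
    frame_requested: bool,
}

impl FrameGate {
    /// Record that a frame at `epoch` needs these images before it can be shown.
    fn delay_for(&mut self, epoch: Epoch, keys: &[ImageKey]) {
        for &key in keys {
            // Skip keys whose update for this epoch has already arrived.
            if self.seen.get(&key).is_some_and(|&seen| seen >= epoch) {
                continue;
            }
            self.pending.insert(key, epoch);
        }
    }

    /// Record that an image update tagged with `epoch` reached the renderer.
    fn image_updated(&mut self, key: ImageKey, epoch: Epoch) {
        self.seen.insert(key, epoch);
        if self.pending.get(&key).is_some_and(|&needed| needed <= epoch) {
            self.pending.remove(&key);
        }
    }

    fn request_frame(&mut self) {
        self.frame_requested = true;
    }

    /// A new frame can be generated once nothing is pending.
    fn ready(&self) -> bool {
        self.frame_requested && self.pending.is_empty()
    }
}

fn main() {
    let mut gate = FrameGate::default();
    gate.delay_for(1, &[10, 11]);
    gate.request_frame();
    assert!(!gate.ready()); // still waiting on both canvas images

    gate.image_updated(10, 1);
    gate.image_updated(11, 1);
    assert!(gate.ready()); // all updates for epoch 1 arrived; frame can be shown
    println!("frame ready: {}", gate.ready());
}
```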
parent 4700149fcb
commit 8beef6c21f
36 changed files with 452 additions and 100 deletions

Cargo.lock (generated)
@@ -1137,6 +1137,7 @@ name = "canvas"
 version = "0.0.1"
 dependencies = [
  "app_units",
+ "base",
  "bytemuck",
  "canvas_traits",
  "compositing_traits",

@@ -18,6 +18,7 @@ tracing = ["dep:tracing"]
 
 [dependencies]
 app_units = { workspace = true }
+base = { workspace = true }
 bytemuck = { workspace = true, features = ["extern_crate_alloc"] }
 canvas_traits = { workspace = true }
 compositing_traits = { workspace = true }

@@ -2,6 +2,7 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+use base::Epoch;
 use canvas_traits::canvas::*;
 use compositing_traits::CrossProcessCompositorApi;
 use euclid::default::{Point2D, Rect, Size2D, Transform2D};
@@ -279,11 +280,11 @@ impl<DrawTarget: GenericDrawTarget> CanvasData<DrawTarget> {
             .drawtarget
             .create_similar_draw_target(&Size2D::new(size.width, size.height).cast());
 
-        self.update_image_rendering();
+        self.update_image_rendering(None);
     }
 
     /// Update image in WebRender
-    pub(crate) fn update_image_rendering(&mut self) {
+    pub(crate) fn update_image_rendering(&mut self, canvas_epoch: Option<Epoch>) {
         let (descriptor, data) = {
             #[cfg(feature = "tracing")]
             let _span = tracing::trace_span!(
@@ -295,7 +296,7 @@ impl<DrawTarget: GenericDrawTarget> CanvasData<DrawTarget> {
         };
 
         self.compositor_api
-            .update_image(self.image_key, descriptor, data);
+            .update_image(self.image_key, descriptor, data, canvas_epoch);
     }
 
     // https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata

@@ -6,6 +6,7 @@ use std::borrow::ToOwned;
 use std::collections::HashMap;
 use std::{f32, thread};
 
+use base::Epoch;
 use canvas_traits::ConstellationCanvasMsg;
 use canvas_traits::canvas::*;
 use compositing_traits::CrossProcessCompositorApi;
@@ -253,9 +254,8 @@ impl CanvasPaintThread {
                     self.canvas(canvas_id)
                         .put_image_data(snapshot.to_owned(), rect);
                 },
-                Canvas2dMsg::UpdateImage(sender) => {
-                    self.canvas(canvas_id).update_image_rendering();
-                    sender.send(()).unwrap();
+                Canvas2dMsg::UpdateImage(canvas_epoch) => {
+                    self.canvas(canvas_id).update_image_rendering(canvas_epoch);
                 },
                 Canvas2dMsg::PopClips(clips) => self.canvas(canvas_id).pop_clips(clips),
             }
@@ -526,12 +526,12 @@ impl Canvas {
         }
     }
 
-    fn update_image_rendering(&mut self) {
+    fn update_image_rendering(&mut self, canvas_epoch: Option<Epoch>) {
        match self {
            #[cfg(feature = "vello")]
-            Canvas::Vello(canvas_data) => canvas_data.update_image_rendering(),
+            Canvas::Vello(canvas_data) => canvas_data.update_image_rendering(canvas_epoch),
            #[cfg(feature = "vello_cpu")]
-            Canvas::VelloCPU(canvas_data) => canvas_data.update_image_rendering(),
+            Canvas::VelloCPU(canvas_data) => canvas_data.update_image_rendering(canvas_epoch),
        }
    }
 

@@ -3,7 +3,8 @@
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
 use std::cell::{Cell, Ref, RefCell};
-use std::collections::HashMap;
+use std::collections::hash_map::Entry;
+use std::collections::{HashMap, HashSet};
 use std::env;
 use std::fs::create_dir_all;
 use std::iter::once;
@@ -46,7 +47,7 @@ use webrender_api::units::{
 use webrender_api::{
     self, BuiltDisplayList, DirtyRect, DisplayListPayload, DocumentId, Epoch as WebRenderEpoch,
     ExternalScrollId, FontInstanceFlags, FontInstanceKey, FontInstanceOptions, FontKey,
-    FontVariation, HitTestFlags, PipelineId as WebRenderPipelineId, PropertyBinding,
+    FontVariation, HitTestFlags, ImageKey, PipelineId as WebRenderPipelineId, PropertyBinding,
     ReferenceFrameKind, RenderReasons, SampledScrollOffset, ScrollLocation, SpaceAndClipInfo,
     SpatialId, SpatialTreeItemKey, TransformStyle,
 };
@@ -83,6 +84,7 @@ pub enum WebRenderDebugOption {
     TextureCacheDebug,
     RenderTargetDebug,
 }
+
 /// Data that is shared by all WebView renderers.
 pub struct ServoRenderer {
     /// The [`RefreshDriver`] which manages the rythym of painting.
@@ -120,6 +122,11 @@ pub struct ServoRenderer {
     /// The last position in the rendered view that the mouse moved over. This becomes `None`
     /// when the mouse leaves the rendered view.
     pub(crate) last_mouse_move_position: Option<DevicePoint>,
+
+    /// A [`FrameRequestDelayer`] which is used to wait for canvas image updates to
+    /// arrive before requesting a new frame, as these happen asynchronously with
+    /// `ScriptThread` display list construction.
+    frame_delayer: FrameDelayer,
 }
 
 /// NB: Never block on the constellation, because sometimes the constellation blocks on us.
@@ -144,7 +151,7 @@ pub struct IOCompositor {
     rendering_context: Rc<dyn RenderingContext>,
 
     /// The number of frames pending to receive from WebRender.
-    pending_frames: usize,
+    pending_frames: Cell<usize>,
 
     /// A handle to the memory profiler which will automatically unregister
     /// when it's dropped.
@@ -321,13 +328,14 @@ impl IOCompositor {
                 webxr_main_thread: state.webxr_main_thread,
                 convert_mouse_to_touch,
                 last_mouse_move_position: None,
+                frame_delayer: Default::default(),
             })),
             webview_renderers: WebViewManager::default(),
             needs_repaint: Cell::default(),
             ready_to_save_state: ReadyState::Unknown,
             webrender: Some(state.webrender),
             rendering_context: state.rendering_context,
-            pending_frames: 0,
+            pending_frames: Cell::new(0),
             _mem_profiler_registration: registration,
         };
 
@@ -492,7 +500,7 @@
                     self.ready_to_save_state,
                     ReadyState::WaitingForConstellationReply
                 );
-                if is_ready && self.pending_frames == 0 {
+                if is_ready && self.pending_frames.get() == 0 {
                     self.ready_to_save_state = ReadyState::ReadyToSaveImage;
                 } else {
                     self.ready_to_save_state = ReadyState::Unknown;
@@ -661,7 +669,6 @@
                 }
 
                 let mut transaction = Transaction::new();
-
                 let is_root_pipeline =
                     Some(pipeline_id.into()) == webview_renderer.root_pipeline_id;
                 if is_root_pipeline && old_scale != webview_renderer.device_pixels_per_page_pixel()
@@ -676,9 +683,22 @@
             },
 
             CompositorMsg::GenerateFrame => {
-                let mut transaction = Transaction::new();
-                self.generate_frame(&mut transaction, RenderReasons::SCENE);
-                self.global.borrow_mut().send_transaction(transaction);
+                let mut global = self.global.borrow_mut();
+                global.frame_delayer.set_pending_frame(true);
+
+                if global.frame_delayer.needs_new_frame() {
+                    let mut transaction = Transaction::new();
+                    self.generate_frame(&mut transaction, RenderReasons::SCENE);
+                    global.send_transaction(transaction);
+
+                    let waiting_pipelines = global.frame_delayer.take_waiting_pipelines();
+                    let _ = global.constellation_sender.send(
+                        EmbedderToConstellationMessage::NoLongerWaitingOnAsynchronousImageUpdates(
+                            waiting_pipelines,
+                        ),
+                    );
+                    global.frame_delayer.set_pending_frame(false);
+                }
             },
 
             CompositorMsg::GenerateImageKey(sender) => {
@@ -699,21 +719,46 @@
                 }
             },
             CompositorMsg::UpdateImages(updates) => {
+                let mut global = self.global.borrow_mut();
                 let mut txn = Transaction::new();
                 for update in updates {
                     match update {
                         ImageUpdate::AddImage(key, desc, data) => {
                             txn.add_image(key, desc, data.into(), None)
                         },
-                        ImageUpdate::DeleteImage(key) => txn.delete_image(key),
-                        ImageUpdate::UpdateImage(key, desc, data) => {
+                        ImageUpdate::DeleteImage(key) => {
+                            txn.delete_image(key);
+                            global.frame_delayer.delete_image(key);
+                        },
+                        ImageUpdate::UpdateImage(key, desc, data, epoch) => {
+                            if let Some(epoch) = epoch {
+                                global.frame_delayer.update_image(key, epoch);
+                            }
                             txn.update_image(key, desc, data.into(), &DirtyRect::All)
                         },
                     }
                 }
-                self.global.borrow_mut().send_transaction(txn);
+
+                if global.frame_delayer.needs_new_frame() {
+                    global.frame_delayer.set_pending_frame(false);
+                    self.generate_frame(&mut txn, RenderReasons::SCENE);
+                    let waiting_pipelines = global.frame_delayer.take_waiting_pipelines();
+                    let _ = global.constellation_sender.send(
+                        EmbedderToConstellationMessage::NoLongerWaitingOnAsynchronousImageUpdates(
+                            waiting_pipelines,
+                        ),
+                    );
+                }
+
+                global.send_transaction(txn);
             },
 
+            CompositorMsg::DelayNewFrameForCanvas(pipeline_id, canvas_epoch, image_keys) => self
+                .global
+                .borrow_mut()
+                .frame_delayer
+                .add_delay(pipeline_id, canvas_epoch, image_keys),
+
             CompositorMsg::AddFont(font_key, data, index) => {
                 self.add_font(font_key, index, data);
             },
@@ -802,7 +847,7 @@
             },
             CompositorMsg::NewWebRenderFrameReady(..) => {
                 // Subtract from the number of pending frames, but do not do any compositing.
-                self.pending_frames -= 1;
+                self.pending_frames.set(self.pending_frames.get() - 1);
             },
             _ => {
                 debug!("Ignoring message ({:?} while shutting down", msg);
@@ -832,8 +877,8 @@
     }
 
     /// Queue a new frame in the transaction and increase the pending frames count.
-    pub(crate) fn generate_frame(&mut self, transaction: &mut Transaction, reason: RenderReasons) {
-        self.pending_frames += 1;
+    pub(crate) fn generate_frame(&self, transaction: &mut Transaction, reason: RenderReasons) {
+        self.pending_frames.set(self.pending_frames.get() + 1);
         transaction.generate_frame(0, true /* present */, reason);
     }
 
@@ -1407,7 +1452,7 @@
                 CompositorMsg::NewWebRenderFrameReady(..) if found_recomposite_msg => {
                     // Only take one of duplicate NewWebRendeFrameReady messages, but do subtract
                     // one frame from the pending frames.
-                    self.pending_frames -= 1;
+                    self.pending_frames.set(self.pending_frames.get() - 1);
                     false
                 },
                 CompositorMsg::NewWebRenderFrameReady(..) => {
@@ -1625,7 +1670,7 @@
     }
 
     fn handle_new_webrender_frame_ready(&mut self, recomposite_needed: bool) {
-        self.pending_frames -= 1;
+        self.pending_frames.set(self.pending_frames.get() - 1);
         if recomposite_needed {
            self.refresh_cursor();
        }
@@ -1634,3 +1679,77 @@
         }
     }
 }
+
+/// A struct that is reponsible for delaying frame requests until all new canvas images
+/// for a particular "update the rendering" call in the `ScriptThread` have been
+/// sent to WebRender.
+///
+/// These images may be updated in WebRender asynchronously in the canvas task. A frame
+/// is then requested if:
+///
+/// - The renderer has received a GenerateFrame message from a `ScriptThread`.
+/// - All pending image updates have finished and have been noted in the [`FrameDelayer`].
+#[derive(Default)]
+struct FrameDelayer {
+    /// The latest [`Epoch`] of canvas images that have been sent to WebRender. Note
+    /// that this only records the `Epoch`s for canvases and only ones that are involved
+    /// in "update the rendering".
+    image_epochs: HashMap<ImageKey, Epoch>,
+    /// A map of all pending canvas images
+    pending_canvas_images: HashMap<ImageKey, Epoch>,
+    /// Whether or not we have a pending frame.
+    pending_frame: bool,
+    /// A list of pipelines that should be notified when we are no longer waiting for
+    /// canvas images.
+    waiting_pipelines: HashSet<PipelineId>,
+}
+
+impl FrameDelayer {
+    fn delete_image(&mut self, image_key: ImageKey) {
+        self.image_epochs.remove(&image_key);
+        self.pending_canvas_images.remove(&image_key);
+    }
+
+    fn update_image(&mut self, image_key: ImageKey, epoch: Epoch) {
+        self.image_epochs.insert(image_key, epoch);
+        let Entry::Occupied(entry) = self.pending_canvas_images.entry(image_key) else {
+            return;
+        };
+        if *entry.get() <= epoch {
+            entry.remove();
+        }
+    }
+
+    fn add_delay(
+        &mut self,
+        pipeline_id: PipelineId,
+        canvas_epoch: Epoch,
+        image_keys: Vec<ImageKey>,
+    ) {
+        for image_key in image_keys.into_iter() {
+            // If we've already seen the necessary epoch for this image, do not
+            // start waiting for it.
+            if self
+                .image_epochs
+                .get(&image_key)
+                .is_some_and(|epoch_seen| *epoch_seen >= canvas_epoch)
+            {
+                continue;
+            }
+            self.pending_canvas_images.insert(image_key, canvas_epoch);
+        }
+        self.waiting_pipelines.insert(pipeline_id);
+    }
+
+    fn needs_new_frame(&self) -> bool {
+        self.pending_frame && self.pending_canvas_images.is_empty()
+    }
+
+    fn set_pending_frame(&mut self, value: bool) {
+        self.pending_frame = value;
+    }
+
+    fn take_waiting_pipelines(&mut self) -> Vec<PipelineId> {
+        self.waiting_pipelines.drain().collect()
+    }
+}

@@ -53,6 +53,7 @@ mod from_constellation {
             Self::CollectMemoryReport(..) => target!("CollectMemoryReport"),
             Self::Viewport(..) => target!("Viewport"),
             Self::GenerateImageKeysForPipeline(..) => target!("GenerateImageKeysForPipeline"),
+            Self::DelayNewFrameForCanvas(..) => target!("DelayFramesForCanvas"),
         }
     }
 }

@@ -1458,6 +1458,9 @@ where
             EmbedderToConstellationMessage::TickAnimation(webview_ids) => {
                 self.handle_tick_animation(webview_ids)
             },
+            EmbedderToConstellationMessage::NoLongerWaitingOnAsynchronousImageUpdates(
+                pipeline_ids,
+            ) => self.handle_no_longer_waiting_on_asynchronous_image_updates(pipeline_ids),
             EmbedderToConstellationMessage::WebDriverCommand(command) => {
                 self.handle_webdriver_msg(command);
             },
@@ -3498,6 +3501,20 @@
         }
     }
 
+    #[servo_tracing::instrument(skip_all)]
+    fn handle_no_longer_waiting_on_asynchronous_image_updates(
+        &mut self,
+        pipeline_ids: Vec<PipelineId>,
+    ) {
+        for pipeline_id in pipeline_ids.into_iter() {
+            if let Some(pipeline) = self.pipelines.get(&pipeline_id) {
+                let _ = pipeline.event_loop.send(
+                    ScriptThreadMessage::NoLongerWaitingOnAsychronousImageUpdates(pipeline_id),
+                );
+            }
+        }
+    }
+
     /// Schedule a navigation(via load_url).
     /// 1: Ask the embedder for permission.
     /// 2: Store the details of the navigation, pending approval from the embedder.

@@ -79,6 +79,9 @@ mod from_compositor {
             Self::SendImageKeysForPipeline(..) => target!("SendImageKeysForPipeline"),
             Self::SetWebDriverResponseSender(..) => target!("SetWebDriverResponseSender"),
             Self::PreferencesUpdated(..) => target!("PreferencesUpdated"),
+            Self::NoLongerWaitingOnAsynchronousImageUpdates(..) => {
+                target!("NoLongerWaitingOnCanvas")
+            },
         }
     }
 }

@@ -4,6 +4,7 @@
 
 //! Common interfaces for Canvas Contexts
 
+use base::Epoch;
 use euclid::default::Size2D;
 use layout_api::HTMLCanvasData;
 use pixels::Snapshot;
@@ -66,7 +67,14 @@ pub(crate) trait CanvasContext {
         }
     }
 
-    fn update_rendering(&self) {}
+    /// The WebRender [`ImageKey`] of this [`CanvasContext`] if any.
+    fn image_key(&self) -> Option<ImageKey>;
+
+    /// Request that the [`CanvasContext`] update the rendering of its contents, returning
+    /// the new [`Epoch`] of the image produced, if one was.
+    fn update_rendering(&self, _canvas_epoch: Option<Epoch>) -> bool {
+        false
+    }
 
     fn onscreen(&self) -> bool {
         let Some(canvas) = self.canvas() else {
@@ -228,19 +236,31 @@ impl CanvasContext for RenderingContext {
         }
     }
 
-    fn update_rendering(&self) {
+    fn image_key(&self) -> Option<ImageKey> {
         match self {
-            RenderingContext::Placeholder(offscreen_canvas) => {
-                if let Some(context) = offscreen_canvas.context() {
-                    context.update_rendering()
-                }
-            },
-            RenderingContext::Context2d(context) => context.update_rendering(),
-            RenderingContext::BitmapRenderer(context) => context.update_rendering(),
-            RenderingContext::WebGL(context) => context.update_rendering(),
-            RenderingContext::WebGL2(context) => context.update_rendering(),
+            RenderingContext::Placeholder(offscreen_canvas) => offscreen_canvas
+                .context()
+                .and_then(|context| context.image_key()),
+            RenderingContext::Context2d(context) => context.image_key(),
+            RenderingContext::BitmapRenderer(context) => context.image_key(),
+            RenderingContext::WebGL(context) => context.image_key(),
+            RenderingContext::WebGL2(context) => context.image_key(),
             #[cfg(feature = "webgpu")]
-            RenderingContext::WebGPU(context) => context.update_rendering(),
+            RenderingContext::WebGPU(context) => context.image_key(),
         }
     }
 
+    fn update_rendering(&self, canvas_epoch: Option<Epoch>) -> bool {
+        match self {
+            RenderingContext::Placeholder(offscreen_canvas) => offscreen_canvas
+                .context()
+                .is_some_and(|context| context.update_rendering(canvas_epoch)),
+            RenderingContext::Context2d(context) => context.update_rendering(canvas_epoch),
+            RenderingContext::BitmapRenderer(context) => context.update_rendering(canvas_epoch),
+            RenderingContext::WebGL(context) => context.update_rendering(canvas_epoch),
+            RenderingContext::WebGL2(context) => context.update_rendering(canvas_epoch),
+            #[cfg(feature = "webgpu")]
+            RenderingContext::WebGPU(context) => context.update_rendering(canvas_epoch),
+        }
+    }
+
@@ -333,11 +353,17 @@ impl CanvasContext for OffscreenRenderingContext {
         }
     }
 
-    fn update_rendering(&self) {
+    fn image_key(&self) -> Option<ImageKey> {
+        None
+    }
+
+    fn update_rendering(&self, canvas_epoch: Option<Epoch>) -> bool {
         match self {
-            OffscreenRenderingContext::Context2d(context) => context.update_rendering(),
-            OffscreenRenderingContext::BitmapRenderer(context) => context.update_rendering(),
-            OffscreenRenderingContext::Detached => {},
+            OffscreenRenderingContext::Context2d(context) => context.update_rendering(canvas_epoch),
+            OffscreenRenderingContext::BitmapRenderer(context) => {
+                context.update_rendering(canvas_epoch)
+            },
+            OffscreenRenderingContext::Detached => false,
         }
     }
 

@@ -8,6 +8,7 @@ use std::str::FromStr;
 use std::sync::Arc;
 
 use app_units::Au;
+use base::Epoch;
 use canvas_traits::canvas::{
     Canvas2dMsg, CanvasFont, CanvasId, CanvasMsg, CompositionOptions, CompositionOrBlending,
     FillOrStrokeStyle, FillRule, GlyphAndPosition, LineCapStyle, LineJoinStyle, LineOptions,
@@ -287,19 +288,18 @@ impl CanvasState {
     }
 
     /// Updates WR image and blocks on completion
-    pub(crate) fn update_rendering(&self) {
+    pub(crate) fn update_rendering(&self, canvas_epoch: Option<Epoch>) -> bool {
         if !self.is_paintable() {
-            return;
+            return false;
         }
 
-        let (sender, receiver) = ipc::channel().unwrap();
         self.ipc_renderer
             .send(CanvasMsg::Canvas2d(
-                Canvas2dMsg::UpdateImage(sender),
+                Canvas2dMsg::UpdateImage(canvas_epoch),
                 self.canvas_id,
             ))
             .unwrap();
-        receiver.recv().unwrap();
+        true
     }
 
     /// <https://html.spec.whatwg.org/multipage/#concept-canvas-set-bitmap-dimensions>

@@ -2,6 +2,7 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+use base::Epoch;
 use canvas_traits::canvas::{Canvas2dMsg, CanvasId};
 use dom_struct::dom_struct;
 use euclid::default::Size2D;
@@ -122,8 +123,11 @@ impl CanvasContext for CanvasRenderingContext2D {
         Some(self.canvas.clone())
     }
 
-    fn update_rendering(&self) {
-        self.canvas_state.update_rendering();
+    fn update_rendering(&self, canvas_epoch: Option<Epoch>) -> bool {
+        if !self.onscreen() {
+            return false;
+        }
+        self.canvas_state.update_rendering(canvas_epoch)
     }
 
     fn resize(&self) {
@@ -155,6 +159,10 @@ impl CanvasContext for CanvasRenderingContext2D {
             canvas.owner_document().add_dirty_2d_canvas(self);
         }
     }
+
+    fn image_key(&self) -> Option<ImageKey> {
+        Some(self.canvas_state.image_key())
+    }
 }
 
 // We add a guard to each of methods by the spec:

@@ -12,10 +12,11 @@ use std::str::FromStr;
 use std::sync::{LazyLock, Mutex};
 use std::time::Duration;
 
+use base::Epoch;
 use base::cross_process_instant::CrossProcessInstant;
 use base::id::WebViewId;
 use canvas_traits::canvas::CanvasId;
-use canvas_traits::webgl::{self, WebGLContextId, WebGLMsg};
+use canvas_traits::webgl::{WebGLContextId, WebGLMsg};
 use chrono::Local;
 use constellation_traits::{NavigationHistoryBehavior, ScriptToConstellationMessage};
 use content_security_policy::{CspList, PolicyDisposition};
@@ -556,6 +557,15 @@ pub(crate) struct Document {
     /// When a `ResizeObserver` starts observing a target, this becomes true, which in turn is a
     /// signal to the [`ScriptThread`] that a rendering update should happen.
     resize_observer_started_observing_target: Cell<bool>,
+    /// Whether or not this [`Document`] is waiting on canvas image updates. If it is
+    /// waiting it will not do any new layout until the canvas images are up-to-date in
+    /// the renderer.
+    waiting_on_canvas_image_updates: Cell<bool>,
+    /// The current canvas epoch, which is used to track when canvas images have been
+    /// uploaded to the renderer after a rendering update. Until those images are uploaded
+    /// this `Document` will not perform any more rendering updates.
+    #[no_trace]
+    current_canvas_epoch: RefCell<Epoch>,
 }
 
 #[allow(non_snake_case)]
@@ -2671,40 +2681,69 @@ impl Document {
         }
 
         // All dirty canvases are flushed before updating the rendering.
-        #[cfg(feature = "webgpu")]
-        self.webgpu_contexts
-            .borrow_mut()
-            .iter()
-            .filter_map(|(_, context)| context.root())
-            .filter(|context| context.onscreen())
-            .for_each(|context| context.update_rendering());
+        self.current_canvas_epoch.borrow_mut().next();
+        let canvas_epoch = *self.current_canvas_epoch.borrow();
+        let mut image_keys = Vec::new();
 
-        self.dirty_2d_contexts
-            .borrow_mut()
-            .drain()
-            .filter(|(_, context)| context.onscreen())
-            .for_each(|(_, context)| context.update_rendering());
+        #[cfg(feature = "webgpu")]
+        image_keys.extend(
+            self.webgpu_contexts
+                .borrow_mut()
+                .iter()
+                .filter_map(|(_, context)| context.root())
+                .filter(|context| context.update_rendering(Some(canvas_epoch)))
+                .map(|context| context.image_key()),
+        );
+
+        image_keys.extend(
+            self.dirty_2d_contexts
+                .borrow_mut()
+                .drain()
+                .filter(|(_, context)| context.update_rendering(Some(canvas_epoch)))
+                .map(|(_, context)| context.image_key()),
+        );
+
         let dirty_webgl_context_ids: Vec<_> = self
             .dirty_webgl_contexts
             .borrow_mut()
             .drain()
             .filter(|(_, context)| context.onscreen())
-            .map(|(id, _)| id)
+            .map(|(id, context)| {
+                image_keys.push(context.image_key());
+                id
+            })
             .collect();
 
         if !dirty_webgl_context_ids.is_empty() {
-            let (sender, receiver) = webgl::webgl_channel().unwrap();
             self.window
                 .webgl_chan()
                 .expect("Where's the WebGL channel?")
-                .send(WebGLMsg::SwapBuffers(dirty_webgl_context_ids, sender, 0))
+                .send(WebGLMsg::SwapBuffers(
+                    dirty_webgl_context_ids,
+                    Some(canvas_epoch),
+                    0,
+                ))
                 .unwrap();
-            receiver.recv().unwrap();
         }
 
+        // The renderer should wait to display the frame until all canvas images are
+        // uploaded. This allows canvas image uploading to happen asynchronously.
+        if !image_keys.is_empty() {
+            self.waiting_on_canvas_image_updates.set(true);
+            self.window().compositor_api().delay_new_frame_for_canvas(
+                self.window().pipeline_id(),
+                canvas_epoch,
+                image_keys.into_iter().flatten().collect(),
+            );
+        }
+
         self.window().reflow(ReflowGoal::UpdateTheRendering)
     }
 
+    pub(crate) fn handle_no_longer_waiting_on_asynchronous_image_updates(&self) {
+        self.waiting_on_canvas_image_updates.set(false);
+    }
+
     /// From <https://drafts.csswg.org/css-font-loading/#fontfaceset-pending-on-the-environment>:
     ///
     /// > A FontFaceSet is pending on the environment if any of the following are true:
@@ -3390,6 +3429,8 @@
             adopted_stylesheets_frozen_types: CachedFrozenArray::new(),
             pending_scroll_event_targets: Default::default(),
             resize_observer_started_observing_target: Cell::new(false),
+            waiting_on_canvas_image_updates: Cell::new(false),
+            current_canvas_epoch: RefCell::new(Epoch(0)),
         }
     }
 

@@ -2,6 +2,7 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+use base::Epoch;
 use dom_struct::dom_struct;
 use webrender_api::ImageKey;
 

@@ -224,6 +224,7 @@ impl VideoFrameRenderer for MediaFrameRenderer {
                 current_frame.image_key,
                 descriptor,
                 SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data())),
+                None,
             ));
         }
 

@@ -150,6 +150,10 @@ impl CanvasContext for ImageBitmapRenderingContext {
             .as_ref()
             .map_or_else(|| self.canvas.size(), |bitmap| bitmap.size())
     }
+
+    fn image_key(&self) -> Option<ImageKey> {
+        None
+    }
 }
 
 impl ImageBitmapRenderingContextMethods<crate::DomTypeHolder> for ImageBitmapRenderingContext {

@@ -97,6 +97,10 @@ impl CanvasContext for OffscreenCanvasRenderingContext2D {
     fn origin_is_clean(&self) -> bool {
         self.context.origin_is_clean()
     }
+
+    fn image_key(&self) -> Option<webrender_api::ImageKey> {
+        None
+    }
 }
 
 impl OffscreenCanvasRenderingContext2DMethods<crate::DomTypeHolder>

@@ -63,9 +63,12 @@ impl PaintRenderingContext2D {
         ))
     }
 
+    pub(crate) fn update_rendering(&self) -> bool {
+        self.canvas_state.update_rendering(None)
+    }
+
     /// Send update to canvas paint thread and returns [`ImageKey`]
     pub(crate) fn image_key(&self) -> ImageKey {
-        self.canvas_state.update_rendering();
         self.canvas_state.image_key()
     }
 

@@ -348,13 +348,13 @@ impl PaintWorkletGlobalScope {
             return self.invalid_image(size_in_dpx, missing_image_urls);
         }
 
-        let image_key = rendering_context.image_key();
+        rendering_context.update_rendering();
 
         DrawAPaintImageResult {
             width: size_in_dpx.width,
             height: size_in_dpx.height,
             format: PixelFormat::BGRA8,
-            image_key: Some(image_key),
+            image_key: Some(rendering_context.image_key()),
             missing_image_urls,
         }
     }

@@ -992,6 +992,10 @@ impl CanvasContext for WebGL2RenderingContext {
     fn mark_as_dirty(&self) {
         self.base.mark_as_dirty()
     }
+
+    fn image_key(&self) -> Option<ImageKey> {
+        self.base.image_key()
+    }
 }
 
 impl WebGL2RenderingContextMethods<crate::DomTypeHolder> for WebGL2RenderingContext {

@@ -2042,6 +2042,10 @@ impl CanvasContext for WebGLRenderingContext {
             HTMLCanvasElementOrOffscreenCanvas::OffscreenCanvas(_) => {},
         }
     }
+
+    fn image_key(&self) -> Option<ImageKey> {
+        Some(self.webrender_image)
+    }
 }
 
 #[cfg(not(feature = "webgl_backtrace"))]

@@ -6,6 +6,7 @@ use std::borrow::Cow;
 use std::cell::RefCell;
 
 use arrayvec::ArrayVec;
+use base::Epoch;
 use dom_struct::dom_struct;
 use ipc_channel::ipc::{self};
 use pixels::Snapshot;
@@ -183,19 +184,29 @@ impl GPUCanvasContext {
     }
 
     /// <https://gpuweb.github.io/gpuweb/#abstract-opdef-expire-the-current-texture>
-    fn expire_current_texture(&self) {
-        if let Some(current_texture) = self.current_texture.take() {
-            // Make copy of texture content
-            self.send_swap_chain_present(current_texture.id());
-            // Step 1
-            current_texture.Destroy()
-        }
+    fn expire_current_texture(&self, canvas_epoch: Option<Epoch>) -> bool {
+        // Step 1: If context.[[currentTexture]] is not null:
+        let Some(current_texture) = self.current_texture.take() else {
+            return false;
+        };
+
+        // Make copy of texture content
+        let did_swap = self.send_swap_chain_present(current_texture.id(), canvas_epoch);
+
+        // Step 1.1: Call context.currentTexture.destroy() (without destroying
+        // context.drawingBuffer) to terminate write access to the image.
+        current_texture.Destroy();
+
+        // Step 1.2: Set context.[[currentTexture]] to null.
+        // This is handled by the call to `.take()` above.
+
+        did_swap
     }
 
     /// <https://gpuweb.github.io/gpuweb/#abstract-opdef-replace-the-drawing-buffer>
     fn replace_drawing_buffer(&self) {
         // Step 1
-        self.expire_current_texture();
+        self.expire_current_texture(None);
         // Step 2
         let configuration = self.configuration.borrow();
         // Step 3
@@ -234,19 +245,28 @@
         }
     }
 
-    fn send_swap_chain_present(&self, texture_id: WebGPUTexture) {
+    fn send_swap_chain_present(
+        &self,
+        texture_id: WebGPUTexture,
+        canvas_epoch: Option<Epoch>,
+    ) -> bool {
         self.drawing_buffer.borrow_mut().cleared = false;
         let encoder_id = self.global().wgpu_id_hub().create_command_encoder_id();
-        if let Err(e) = self.channel.0.send(WebGPURequest::SwapChainPresent {
+        let send_result = self.channel.0.send(WebGPURequest::SwapChainPresent {
             context_id: self.context_id,
             texture_id: texture_id.0,
             encoder_id,
-        }) {
+            canvas_epoch,
+        });
+
+        if let Err(error) = &send_result {
             warn!(
-                "Failed to send UpdateWebrenderData({:?}) ({})",
-                self.context_id, e
+                "Failed to send UpdateWebrenderData({:?}) ({error})",
+                self.context_id,
             );
         }
+
+        send_result.is_ok()
     }
 }
 
@@ -258,9 +278,19 @@ impl CanvasContext for GPUCanvasContext {
     }
 
     /// <https://gpuweb.github.io/gpuweb/#abstract-opdef-updating-the-rendering-of-a-webgpu-canvas>
-    fn update_rendering(&self) {
-        // Step 1
-        self.expire_current_texture();
+    fn update_rendering(&self, canvas_epoch: Option<Epoch>) -> bool {
+        if !self.onscreen() {
+            return false;
+        }
+
+        // Step 1: Expire the current texture of context.
+        self.expire_current_texture(canvas_epoch)
+        // Step 2: Set context.[[lastPresentedImage]] to context.[[drawingBuffer]].
+        // TODO: Implement this.
     }
 
+    fn image_key(&self) -> Option<ImageKey> {
+        Some(self.webrender_image)
+    }
+
     /// <https://gpuweb.github.io/gpuweb/#abstract-opdef-update-the-canvas-size>

@@ -101,6 +101,7 @@ impl ImageAnimationManager {
                     flags: ImageDescriptorFlags::ALLOW_MIPMAPS,
                 },
                 SerializableImageData::Raw(IpcSharedMemory::from_bytes(frame.bytes)),
+                None,
             ))
         })
         .collect();

@@ -98,6 +98,7 @@ impl MixedMessage {
                 ScriptThreadMessage::EvaluateJavaScript(id, _, _) => Some(*id),
                 ScriptThreadMessage::SendImageKeysBatch(..) => None,
                 ScriptThreadMessage::PreferencesUpdated(..) => None,
+                ScriptThreadMessage::NoLongerWaitingOnAsychronousImageUpdates(_) => None,
             },
             MixedMessage::FromScript(inner_msg) => match inner_msg {
                 MainThreadScriptMsg::Common(CommonScriptMsg::Task(_, _, pipeline_id, _)) => {

@@ -1424,6 +1424,13 @@ impl ScriptThread {
             )) => {
                 self.set_needs_rendering_update();
             },
+            MixedMessage::FromConstellation(
+                ScriptThreadMessage::NoLongerWaitingOnAsychronousImageUpdates(pipeline_id),
+            ) => {
+                if let Some(document) = self.documents.borrow().find_document(pipeline_id) {
+                    document.handle_no_longer_waiting_on_asynchronous_image_updates();
+                }
+            },
             MixedMessage::FromConstellation(ScriptThreadMessage::SendInputEvent(id, event)) => {
                 self.handle_input_event(id, event)
             },
@@ -1891,6 +1898,7 @@ impl ScriptThread {
                 msg @ ScriptThreadMessage::ExitFullScreen(..) |
                 msg @ ScriptThreadMessage::SendInputEvent(..) |
                 msg @ ScriptThreadMessage::TickAllAnimations(..) |
+                msg @ ScriptThreadMessage::NoLongerWaitingOnAsychronousImageUpdates(..) |
                 msg @ ScriptThreadMessage::ExitScriptThread => {
                     panic!("should have handled {:?} already", msg)
                 },

@@ -16,11 +16,14 @@ pub mod print_tree;
 pub mod text;
 mod unicode_block;
 
+use malloc_size_of_derive::MallocSizeOf;
 use serde::{Deserialize, Serialize};
 use webrender_api::Epoch as WebRenderEpoch;
 
 /// A struct for denoting the age of messages; prevents race conditions.
-#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]
+#[derive(
+    Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, MallocSizeOf,
+)]
 pub struct Epoch(pub u32);
 
 impl Epoch {

@@ -5,6 +5,7 @@
 use std::default::Default;
 use std::str::FromStr;
 
+use base::Epoch;
 use euclid::Angle;
 use euclid::approxeq::ApproxEq;
 use euclid::default::{Point2D, Rect, Size2D, Transform2D};
@@ -516,7 +517,7 @@ pub enum Canvas2dMsg {
         CompositionOptions,
         Transform2D<f64>,
     ),
-    UpdateImage(IpcSender<()>),
+    UpdateImage(Option<Epoch>),
 }
 
 #[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]

@@ -7,6 +7,7 @@ use std::fmt;
 use std::num::{NonZeroU32, NonZeroU64};
 use std::ops::Deref;
 
+use base::Epoch;
 /// Receiver type used in WebGLCommands.
 pub use base::generic_channel::GenericReceiver as WebGLReceiver;
 /// Sender type used in WebGLCommands.
@@ -107,7 +108,7 @@ pub enum WebGLMsg {
     /// The third field contains the time (in ns) when the request
     /// was initiated. The u64 in the second field will be the time the
     /// request is fulfilled
-    SwapBuffers(Vec<WebGLContextId>, WebGLSender<u64>, u64),
+    SwapBuffers(Vec<WebGLContextId>, Option<Epoch>, u64),
     /// Frees all resources and closes the thread.
     Exit(IpcSender<()>),
 }

@@ -6,6 +6,7 @@
 
 use std::fmt::{Debug, Error, Formatter};
 
+use base::Epoch;
 use base::id::{PipelineId, WebViewId};
 use crossbeam_channel::Sender;
 use embedder_traits::{AnimationState, EventLoopWaker, TouchEventResult};
@@ -131,6 +132,11 @@ pub enum CompositorMsg {
     GenerateImageKeysForPipeline(PipelineId),
     /// Perform a resource update operation.
     UpdateImages(SmallVec<[ImageUpdate; 1]>),
+    /// Pause all pipeline display list processing for the given pipeline until the
+    /// following image updates have been received. This is used to ensure that canvas
+    /// elements have had a chance to update their rendering and send the image update to
+    /// the renderer before their associated display list is actually displayed.
+    DelayNewFrameForCanvas(PipelineId, Epoch, Vec<ImageKey>),
 
     /// Generate a new batch of font keys which can be used to allocate
     /// keys asynchronously.
@@ -222,6 +228,21 @@ impl CrossProcessCompositorApi {
         }
     }
 
+    pub fn delay_new_frame_for_canvas(
+        &self,
+        pipeline_id: PipelineId,
+        canvas_epoch: Epoch,
+        image_keys: Vec<ImageKey>,
+    ) {
+        if let Err(error) = self.0.send(CompositorMsg::DelayNewFrameForCanvas(
+            pipeline_id,
+            canvas_epoch,
+            image_keys,
+        )) {
+            warn!("Error delaying frames for canvas image updates {error:?}");
+        }
+    }
+
     /// Inform WebRender of a new display list for the given pipeline.
     pub fn send_display_list(
         &self,
@@ -296,8 +317,9 @@
         key: ImageKey,
         descriptor: ImageDescriptor,
         data: SerializableImageData,
+        epoch: Option<Epoch>,
     ) {
-        self.update_images([ImageUpdate::UpdateImage(key, descriptor, data)].into());
+        self.update_images([ImageUpdate::UpdateImage(key, descriptor, data, epoch)].into());
     }
 
     pub fn delete_image(&self, key: ImageKey) {
@@ -538,7 +560,31 @@ pub enum ImageUpdate {
     /// Delete a previously registered image registration.
     DeleteImage(ImageKey),
     /// Update an existing image registration.
-    UpdateImage(ImageKey, ImageDescriptor, SerializableImageData),
+    UpdateImage(
+        ImageKey,
+        ImageDescriptor,
+        SerializableImageData,
+        Option<Epoch>,
+    ),
 }
 
+impl Debug for ImageUpdate {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::AddImage(image_key, image_desc, _) => f
+                .debug_tuple("AddImage")
+                .field(image_key)
+                .field(image_desc)
+                .finish(),
+            Self::DeleteImage(image_key) => f.debug_tuple("DeleteImage").field(image_key).finish(),
+            Self::UpdateImage(image_key, image_desc, _, epoch) => f
+                .debug_tuple("UpdateImage")
+                .field(image_key)
+                .field(image_desc)
+                .field(epoch)
+                .finish(),
+        }
+    }
+}
+
 #[derive(Debug, Deserialize, Serialize)]

@@ -57,6 +57,10 @@ pub enum EmbedderToConstellationMessage {
     /// Requests that the constellation instruct script/layout to try to layout again and tick
     /// animations.
     TickAnimation(Vec<WebViewId>),
+    /// Notify the `ScriptThread` that the Servo renderer is no longer waiting on
+    /// asynchronous image uploads for the given `Pipeline`. These are mainly used
+    /// by canvas to perform uploads while the display list is being built.
+    NoLongerWaitingOnAsynchronousImageUpdates(Vec<PipelineId>),
     /// Dispatch a webdriver command
     WebDriverCommand(WebDriverCommandMsg),
     /// Reload a top-level browsing context.

@@ -261,6 +261,10 @@ pub enum ScriptThreadMessage {
     SendImageKeysBatch(PipelineId, Vec<ImageKey>),
     /// Preferences were updated in the parent process.
     PreferencesUpdated(Vec<(String, PrefValue)>),
+    /// Notify the `ScriptThread` that the Servo renderer is no longer waiting on
+    /// asynchronous image uploads for the given `Pipeline`. These are mainly used
+    /// by canvas to perform uploads while the display list is being built.
+    NoLongerWaitingOnAsychronousImageUpdates(PipelineId),
 }
 
 impl fmt::Debug for ScriptThreadMessage {

@@ -6,6 +6,7 @@
 //! (usually from the ScriptThread, and more specifically from DOM objects)
 
 use arrayvec::ArrayVec;
+use base::Epoch;
 use base::id::PipelineId;
 use ipc_channel::ipc::{IpcSender, IpcSharedMemory};
 use pixels::IpcSnapshot;
@@ -162,6 +163,7 @@ pub enum WebGPURequest {
         context_id: WebGPUContextId,
         texture_id: TextureId,
         encoder_id: CommandEncoderId,
+        canvas_epoch: Option<Epoch>,
     },
     /// Obtains image from latest presentation buffer (same as wr update)
     GetImage {

@@ -16,7 +16,7 @@ webgl_backtrace = ["canvas_traits/webgl_backtrace"]
 webxr = ["dep:webxr", "dep:webxr-api"]
 
 [dependencies]
-base = { path = "../shared/base" }
+base = { workspace = true }
 bitflags = { workspace = true }
 byteorder = { workspace = true }
 canvas_traits = { workspace = true }

@@ -8,6 +8,7 @@ use std::rc::Rc;
 use std::sync::{Arc, Mutex};
 use std::{slice, thread};
 
+use base::Epoch;
 use base::generic_channel::RoutedReceiver;
 use bitflags::bitflags;
 use byteorder::{ByteOrder, NativeEndian, WriteBytesExt};
@@ -381,8 +382,8 @@ impl WebGLThread {
                 #[cfg(feature = "webxr")]
                 self.handle_webxr_command(_command);
             },
-            WebGLMsg::SwapBuffers(swap_ids, sender, sent_time) => {
-                self.handle_swap_buffers(swap_ids, sender, sent_time);
+            WebGLMsg::SwapBuffers(swap_ids, canvas_epoch, sent_time) => {
+                self.handle_swap_buffers(canvas_epoch, swap_ids, sent_time);
             },
             WebGLMsg::Exit(sender) => {
                 // Call remove_context functions in order to correctly delete WebRender image keys.
@@ -711,7 +712,7 @@ impl WebGLThread {
             .state
             .requested_flags
             .contains(ContextAttributeFlags::ALPHA);
-        self.update_wr_image_for_context(context_id, size.to_i32(), has_alpha);
+        self.update_wr_image_for_context(context_id, size.to_i32(), has_alpha, None);
 
         Ok(())
     }
@@ -766,8 +767,8 @@
 
     fn handle_swap_buffers(
         &mut self,
+        canvas_epoch: Option<Epoch>,
         context_ids: Vec<WebGLContextId>,
-        completed_sender: WebGLSender<u64>,
         _sent_time: u64,
     ) {
         debug!("handle_swap_buffers()");
@@ -846,12 +847,11 @@
                 .state
                 .requested_flags
                 .contains(ContextAttributeFlags::ALPHA);
-            self.update_wr_image_for_context(context_id, size, has_alpha);
+            self.update_wr_image_for_context(context_id, size, has_alpha, canvas_epoch);
         }
 
         #[allow(unused)]
         let mut end_swap = 0;
-        completed_sender.send(end_swap).unwrap();
     }
 
     /// Which access mode to use
@@ -921,6 +921,7 @@
         context_id: WebGLContextId,
         size: Size2D<i32>,
         has_alpha: bool,
+        canvas_epoch: Option<Epoch>,
     ) {
         let info = self.cached_context_info.get(&context_id).unwrap();
         let image_buffer_kind = current_wr_image_buffer_kind(&self.device);
@@ -929,7 +930,7 @@
         let image_data = Self::external_image_data(context_id, image_buffer_kind);
 
         self.compositor_api
-            .update_image(info.image_key, descriptor, image_data);
+            .update_image(info.image_key, descriptor, image_data, canvas_epoch);
     }
 
     /// Helper function to create a `ImageDescriptor`.

@@ -8,6 +8,7 @@ use std::slice;
 use std::sync::{Arc, Mutex};
 
 use arrayvec::ArrayVec;
+use base::Epoch;
 use compositing_traits::{
     CrossProcessCompositorApi, ExternalImageSource, SerializableImageData,
     WebrenderExternalImageApi,
@@ -425,6 +426,7 @@ impl crate::WGPU {
             context_data.image_key,
             context_data.image_desc.0,
             SerializableImageData::External(context_data.image_data),
+            None,
         );
     }
 }
@@ -435,6 +437,7 @@
         context_id: WebGPUContextId,
         encoder_id: id::Id<id::markers::CommandEncoder>,
         texture_id: id::Id<id::markers::Texture>,
+        canvas_epoch: Option<Epoch>,
     ) -> Result<(), Box<dyn std::error::Error>> {
         fn err<T: std::error::Error + 'static>(e: Option<T>) -> Result<(), T> {
             if let Some(error) = e {
@@ -518,6 +521,7 @@
                     compositor_api,
                     image_desc,
                     presentation_id,
+                    canvas_epoch,
                 );
             })
         };
@@ -550,6 +554,7 @@ fn update_wr_image(
     compositor_api: CrossProcessCompositorApi,
     image_desc: WebGPUImageDescriptor,
     presentation_id: PresentationId,
+    canvas_epoch: Option<Epoch>,
 ) {
     match result {
         Ok(()) => {
@@ -577,6 +582,7 @@
                 context_data.image_key,
                 context_data.image_desc.0,
                 SerializableImageData::External(context_data.image_data),
+                canvas_epoch,
             );
             if let Some(old_presentation_buffer) = old_presentation_buffer {
                 context_data.unmap_old_buffer(old_presentation_buffer)

@@ -520,8 +520,14 @@ impl WGPU {
                     context_id,
                     texture_id,
                     encoder_id,
+                    canvas_epoch,
                 } => {
-                    let result = self.swapchain_present(context_id, encoder_id, texture_id);
+                    let result = self.swapchain_present(
+                        context_id,
+                        encoder_id,
+                        texture_id,
+                        canvas_epoch,
+                    );
                     if let Err(e) = result {
                         log::error!("Error occured in SwapChainPresent: {e:?}");
                     }

@@ -1,5 +1,4 @@
 [too-small-buffers.html]
-  expected: TIMEOUT
   [WebGL test #100]
     expected: FAIL
 