Introduce snapshot concept of canvas (#36119)

Each canvas context returns a snapshot instead of just raw bytes. This
allows us to hold off conversions (BGRA <-> RGBA, (un)premultiply) to
when/if they are actually needed. For example when loading snapshot into
webgl we can load both RGBA and BGRA so no conversion is really needed.

Currently the whole thing is designed to be extendable based on
https://github.com/servo/ipc-channel/pull/356, to make fewer copies.
Hence some commented out code.


Fixes #35759
There are tests for these changes in WPT

---------

Signed-off-by: sagudev <16504129+sagudev@users.noreply.github.com>
This commit is contained in:
sagudev 2025-04-23 09:32:47 +02:00 committed by GitHub
parent b6967fc4c8
commit 73b778e67f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
37 changed files with 724 additions and 251 deletions

View file

@ -114,6 +114,7 @@ servo_config = { path = "../config" }
servo_geometry = { path = "../geometry" }
servo_rand = { path = "../rand" }
servo_url = { path = "../url" }
snapshot = { workspace = true }
smallvec = { workspace = true, features = ["union"] }
strum = { workspace = true }
strum_macros = { workspace = true }

View file

@ -5,8 +5,8 @@
//! Common interfaces for Canvas Contexts
use euclid::default::Size2D;
use ipc_channel::ipc::IpcSharedMemory;
use script_layout_interface::{HTMLCanvasData, HTMLCanvasDataSource};
use snapshot::Snapshot;
use crate::dom::bindings::codegen::UnionTypes::HTMLCanvasElementOrOffscreenCanvas;
use crate::dom::bindings::inheritance::Castable;
@ -30,11 +30,10 @@ pub(crate) trait CanvasContext {
fn resize(&self);
fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory>;
fn get_image_data(&self) -> Option<Vec<u8>> {
self.get_image_data_as_shared_memory().map(|sm| sm.to_vec())
}
/// Returns none if area of canvas is zero.
///
/// In case of other errors it returns cleared snapshot
fn get_image_data(&self) -> Option<Snapshot>;
fn origin_is_clean(&self) -> bool {
true

View file

@ -17,7 +17,7 @@ use cssparser::color::clamp_unit_f32;
use cssparser::{Parser, ParserInput};
use euclid::default::{Point2D, Rect, Size2D, Transform2D};
use euclid::vec2;
use ipc_channel::ipc::{self, IpcSender, IpcSharedMemory};
use ipc_channel::ipc::{self, IpcSender};
use net_traits::image_cache::{ImageCache, ImageResponse};
use net_traits::request::CorsSettings;
use pixels::PixelFormat;
@ -298,7 +298,7 @@ impl CanvasState {
&self,
url: ServoUrl,
cors_setting: Option<CorsSettings>,
) -> Option<(IpcSharedMemory, Size2D<u32>)> {
) -> Option<snapshot::Snapshot> {
let img = match self.request_image_from_cache(url, cors_setting) {
ImageResponse::Loaded(img, _) => img,
ImageResponse::PlaceholderLoaded(_, _) |
@ -308,13 +308,22 @@ impl CanvasState {
},
};
let image_size = Size2D::new(img.width, img.height);
let image_data = match img.format {
PixelFormat::BGRA8 => img.bytes(),
let size = Size2D::new(img.width, img.height);
let format = match img.format {
PixelFormat::BGRA8 => snapshot::PixelFormat::BGRA,
PixelFormat::RGBA8 => snapshot::PixelFormat::RGBA,
pixel_format => unimplemented!("unsupported pixel format ({:?})", pixel_format),
};
let alpha_mode = snapshot::AlphaMode::Transparent {
premultiplied: false,
};
Some((image_data, image_size))
Some(snapshot::Snapshot::from_shared_memory(
size.cast(),
format,
alpha_mode,
img.bytes(),
))
}
fn request_image_from_cache(
@ -341,13 +350,16 @@ impl CanvasState {
assert!(Rect::from_size(canvas_size).contains_rect(&rect));
let (sender, receiver) = ipc::bytes_channel().unwrap();
let (sender, receiver) = ipc::channel().unwrap();
self.send_canvas_2d_msg(Canvas2dMsg::GetImageData(rect, canvas_size, sender));
let mut pixels = receiver.recv().unwrap().to_vec();
pixels::unmultiply_inplace::<true>(&mut pixels);
pixels
let mut snapshot = receiver.recv().unwrap().to_owned();
snapshot.transform(
snapshot::AlphaMode::Transparent {
premultiplied: false,
},
snapshot::PixelFormat::RGBA,
);
snapshot.to_vec()
}
///
@ -594,10 +606,10 @@ impl CanvasState {
dh: Option<f64>,
) -> ErrorResult {
debug!("Fetching image {}.", url);
let (image_data, image_size) = self
let snapshot = self
.fetch_image_data(url, cors_setting)
.ok_or(Error::InvalidState)?;
let image_size = image_size.to_f64();
let image_size = snapshot.size().to_f64();
let dw = dw.unwrap_or(image_size.width);
let dh = dh.unwrap_or(image_size.height);
@ -614,8 +626,7 @@ impl CanvasState {
let smoothing_enabled = self.state.borrow().image_smoothing_enabled;
self.send_canvas_2d_msg(Canvas2dMsg::DrawImage(
image_data,
image_size,
snapshot.as_ipc(),
dest_rect,
source_rect,
smoothing_enabled,
@ -929,7 +940,7 @@ impl CanvasState {
mut repetition: DOMString,
can_gc: CanGc,
) -> Fallible<Option<DomRoot<CanvasPattern>>> {
let (image_data, image_size) = match image {
let snapshot = match image {
CanvasImageSource::HTMLImageElement(ref image) => {
// https://html.spec.whatwg.org/multipage/#check-the-usability-of-the-image-argument
if !image.is_usable()? {
@ -941,27 +952,17 @@ impl CanvasState {
.and_then(|url| {
self.fetch_image_data(url, cors_setting_for_element(image.upcast()))
})
.map(|data| (data.0.to_vec(), data.1))
.ok_or(Error::InvalidState)?
},
CanvasImageSource::HTMLCanvasElement(ref canvas) => {
let (data, size) = canvas.fetch_all_data().ok_or(Error::InvalidState)?;
let data = data
.map(|data| data.to_vec())
.unwrap_or_else(|| vec![0; size.area() as usize * 4]);
(data, size)
canvas.get_image_data().ok_or(Error::InvalidState)?
},
CanvasImageSource::OffscreenCanvas(ref canvas) => {
let (data, size) = canvas.fetch_all_data().ok_or(Error::InvalidState)?;
let data = data
.map(|data| data.to_vec())
.unwrap_or_else(|| vec![0; size.area() as usize * 4]);
(data, size)
canvas.get_image_data().ok_or(Error::InvalidState)?
},
CanvasImageSource::CSSStyleValue(ref value) => value
.get_url(self.base_url.clone())
.and_then(|url| self.fetch_image_data(url, None))
.map(|data| (data.0.to_vec(), data.1))
.ok_or(Error::InvalidState)?,
};
@ -970,10 +971,11 @@ impl CanvasState {
}
if let Ok(rep) = RepetitionStyle::from_str(&repetition) {
let size = snapshot.size();
Ok(Some(CanvasPattern::new(
global,
image_data,
image_size,
snapshot.to_vec(),
size.cast(),
rep,
self.is_origin_clean(image),
can_gc,

View file

@ -5,11 +5,11 @@
use canvas_traits::canvas::{Canvas2dMsg, CanvasId, CanvasMsg, FromScriptMsg};
use dom_struct::dom_struct;
use euclid::default::{Point2D, Rect, Size2D};
use ipc_channel::ipc::IpcSharedMemory;
use profile_traits::ipc;
use script_bindings::inheritance::Castable;
use script_layout_interface::HTMLCanvasDataSource;
use servo_url::ServoUrl;
use snapshot::Snapshot;
use crate::canvas_context::{CanvasContext, CanvasHelpers, LayoutCanvasRenderingContextHelpers};
use crate::canvas_state::CanvasState;
@ -142,16 +142,18 @@ impl CanvasContext for CanvasRenderingContext2D {
self.set_bitmap_dimensions(self.size().cast())
}
fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory> {
fn get_image_data(&self) -> Option<Snapshot> {
let size = self.size();
if size.is_empty() {
return None;
}
let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap();
let msg = CanvasMsg::FromScript(FromScriptMsg::SendPixels(sender), self.get_canvas_id());
self.canvas_state.get_ipc_renderer().send(msg).unwrap();
Some(receiver.recv().unwrap())
}
fn get_image_data(&self) -> Option<Vec<u8>> {
Some(self.get_rect(Rect::from_size(self.size().cast())))
Some(receiver.recv().unwrap().to_owned())
}
fn origin_is_clean(&self) -> bool {

View file

@ -2880,15 +2880,11 @@ impl GlobalScope {
return p;
}
if let Some((data, size)) = canvas.fetch_all_data() {
let data = data
.map(|data| data.to_vec())
.unwrap_or_else(|| vec![0; size.area() as usize * 4]);
if let Some(snapshot) = canvas.get_image_data() {
let size = snapshot.size().cast();
let image_bitmap =
ImageBitmap::new(self, size.width, size.height, can_gc).unwrap();
image_bitmap.set_bitmap_data(data);
image_bitmap.set_bitmap_data(snapshot.to_vec());
image_bitmap.set_origin_clean(canvas.origin_is_clean());
p.resolve_native(&(image_bitmap), can_gc);
}
@ -2901,14 +2897,11 @@ impl GlobalScope {
return p;
}
if let Some((data, size)) = canvas.fetch_all_data() {
let data = data
.map(|data| data.to_vec())
.unwrap_or_else(|| vec![0; size.area() as usize * 4]);
if let Some(snapshot) = canvas.get_image_data() {
let size = snapshot.size().cast();
let image_bitmap =
ImageBitmap::new(self, size.width, size.height, can_gc).unwrap();
image_bitmap.set_bitmap_data(data);
image_bitmap.set_bitmap_data(snapshot.to_vec());
image_bitmap.set_origin_clean(canvas.origin_is_clean());
p.resolve_native(&(image_bitmap), can_gc);
}

View file

@ -17,7 +17,6 @@ use image::codecs::jpeg::JpegEncoder;
use image::codecs::png::PngEncoder;
use image::codecs::webp::WebPEncoder;
use image::{ColorType, ImageEncoder};
use ipc_channel::ipc::IpcSharedMemory;
#[cfg(feature = "webgpu")]
use ipc_channel::ipc::{self as ipcchan};
use js::error::throw_type_error;
@ -25,6 +24,7 @@ use js::rust::{HandleObject, HandleValue};
use script_layout_interface::{HTMLCanvasData, HTMLCanvasDataSource};
use servo_media::streams::MediaStreamType;
use servo_media::streams::registry::MediaStreamId;
use snapshot::Snapshot;
use style::attr::AttrValue;
use crate::canvas_context::CanvasContext as _;
@ -69,6 +69,7 @@ use crate::script_runtime::{CanGc, JSContext};
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[derive(PartialEq)]
enum EncodedImageType {
Png,
Jpeg,
@ -375,42 +376,21 @@ impl HTMLCanvasElement {
self.Height() != 0 && self.Width() != 0
}
pub(crate) fn fetch_all_data(&self) -> Option<(Option<IpcSharedMemory>, Size2D<u32>)> {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
return None;
}
let data = match self.context.borrow().as_ref() {
Some(CanvasContext::Context2d(context)) => context.get_image_data_as_shared_memory(),
Some(CanvasContext::WebGL(_context)) => {
// TODO: add a method in WebGLRenderingContext to get the pixels.
return None;
},
Some(CanvasContext::WebGL2(_context)) => {
// TODO: add a method in WebGL2RenderingContext to get the pixels.
return None;
},
pub(crate) fn get_image_data(&self) -> Option<Snapshot> {
match self.context.borrow().as_ref() {
Some(CanvasContext::Context2d(context)) => context.get_image_data(),
Some(CanvasContext::WebGL(context)) => context.get_image_data(),
Some(CanvasContext::WebGL2(context)) => context.get_image_data(),
#[cfg(feature = "webgpu")]
Some(CanvasContext::WebGPU(context)) => context.get_image_data_as_shared_memory(),
Some(CanvasContext::Placeholder(context)) => return context.fetch_all_data(),
None => None,
};
Some((data, size))
}
fn get_content(&self) -> Option<Vec<u8>> {
match *self.context.borrow() {
Some(CanvasContext::Context2d(ref context)) => context.get_image_data(),
Some(CanvasContext::WebGL(ref context)) => context.get_image_data(),
Some(CanvasContext::WebGL2(ref context)) => context.get_image_data(),
#[cfg(feature = "webgpu")]
Some(CanvasContext::WebGPU(ref context)) => context.get_image_data(),
Some(CanvasContext::Placeholder(_)) | None => {
// Each pixel is fully-transparent black.
Some(vec![0; (self.Width() * self.Height() * 4) as usize])
Some(CanvasContext::WebGPU(context)) => context.get_image_data(),
Some(CanvasContext::Placeholder(context)) => context.get_image_data(),
None => {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
None
} else {
Some(Snapshot::cleared(size.cast()))
}
},
}
}
@ -560,11 +540,23 @@ impl HTMLCanvasElementMethods<crate::DomTypeHolder> for HTMLCanvasElement {
}
// Step 3.
let Some(file) = self.get_content() else {
let Some(mut snapshot) = self.get_image_data() else {
return Ok(USVString("data:,".into()));
};
let image_type = EncodedImageType::from(mime_type);
snapshot.transform(
if image_type == EncodedImageType::Jpeg {
snapshot::AlphaMode::AsOpaque {
premultiplied: true,
}
} else {
snapshot::AlphaMode::Transparent {
premultiplied: false,
}
},
snapshot::PixelFormat::RGBA,
);
let mut url = format!("data:{};base64,", image_type.as_mime_type());
let mut encoder = base64::write::EncoderStringWriter::from_consumer(
@ -575,7 +567,7 @@ impl HTMLCanvasElementMethods<crate::DomTypeHolder> for HTMLCanvasElement {
self.encode_for_mime_type(
&image_type,
Self::maybe_quality(quality),
&file,
snapshot.data(),
&mut encoder,
);
encoder.into_inner();
@ -604,7 +596,7 @@ impl HTMLCanvasElementMethods<crate::DomTypeHolder> for HTMLCanvasElement {
let result = if self.Width() == 0 || self.Height() == 0 {
None
} else {
self.get_content()
self.get_image_data()
};
let this = Trusted::new(self);
@ -625,13 +617,17 @@ impl HTMLCanvasElementMethods<crate::DomTypeHolder> for HTMLCanvasElement {
return error!("Expected blob callback, but found none!");
};
if let Some(bytes) = result {
if let Some(mut snapshot) = result {
snapshot.transform(
snapshot::AlphaMode::Transparent{ premultiplied: false },
snapshot::PixelFormat::RGBA
);
// Step 4.1
// If result is non-null, then set result to a serialization of result as a file with
// type and quality if given.
let mut encoded: Vec<u8> = vec![];
this.encode_for_mime_type(&image_type, quality, &bytes, &mut encoded);
this.encode_for_mime_type(&image_type, quality, snapshot.data(), &mut encoded);
let blob_impl = BlobImpl::new_from_bytes(encoded, image_type.as_mime_type());
// Step 4.2.1 & 4.2.2
// Set result to a new Blob object, created in the relevant realm of this canvas element

View file

@ -6,8 +6,8 @@ use std::cell::Cell;
use dom_struct::dom_struct;
use euclid::default::Size2D;
use ipc_channel::ipc::IpcSharedMemory;
use js::rust::{HandleObject, HandleValue};
use snapshot::Snapshot;
use crate::dom::bindings::cell::{DomRefCell, Ref, ref_filter_map};
use crate::dom::bindings::codegen::Bindings::OffscreenCanvasBinding::{
@ -88,21 +88,18 @@ impl OffscreenCanvas {
ref_filter_map(self.context.borrow(), |ctx| ctx.as_ref())
}
pub(crate) fn fetch_all_data(&self) -> Option<(Option<IpcSharedMemory>, Size2D<u32>)> {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
return None;
}
let data = match self.context.borrow().as_ref() {
Some(OffscreenCanvasContext::OffscreenContext2d(context)) => {
context.get_image_data_as_shared_memory()
pub(crate) fn get_image_data(&self) -> Option<Snapshot> {
match self.context.borrow().as_ref() {
Some(OffscreenCanvasContext::OffscreenContext2d(context)) => context.get_image_data(),
None => {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
None
} else {
Some(Snapshot::cleared(size))
}
},
None => None,
};
Some((data, size.to_u32()))
}
}
pub(crate) fn get_or_init_2d_context(

View file

@ -8,7 +8,7 @@ use crate::dom::bindings::codegen::UnionTypes::HTMLCanvasElementOrOffscreenCanva
use canvas_traits::canvas::Canvas2dMsg;
use dom_struct::dom_struct;
use euclid::default::Size2D;
use ipc_channel::ipc::IpcSharedMemory;
use snapshot::Snapshot;
use crate::dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::{
CanvasDirection, CanvasFillRule, CanvasImageSource, CanvasLineCap, CanvasLineJoin,
@ -76,8 +76,8 @@ impl OffscreenCanvasRenderingContext2D {
self.context.origin_is_clean()
}
pub(crate) fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory> {
self.context.get_image_data_as_shared_memory()
pub(crate) fn get_image_data(&self) -> Option<Snapshot> {
self.context.get_image_data()
}
}

View file

@ -24,6 +24,7 @@ use js::typedarray::{ArrayBufferView, CreateWith, Float32, Int32Array, Uint32, U
use script_bindings::interfaces::WebGL2RenderingContextHelpers;
use script_layout_interface::HTMLCanvasDataSource;
use servo_config::pref;
use snapshot::Snapshot;
use url::Host;
use crate::canvas_context::CanvasContext;
@ -549,11 +550,11 @@ impl WebGL2RenderingContext {
return
);
let (sender, receiver) = ipc::bytes_channel().unwrap();
let (sender, receiver) = ipc::channel().unwrap();
self.base.send_command(WebGLCommand::ReadPixels(
src_rect, format, pixel_type, sender,
));
let src = receiver.recv().unwrap();
let (src, _) = receiver.recv().unwrap();
for i in 0..src_rect.size.height as usize {
let src_start = i * src_row_bytes as usize;
@ -916,11 +917,7 @@ impl CanvasContext for WebGL2RenderingContext {
self.base.resize();
}
fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory> {
self.base.get_image_data_as_shared_memory()
}
fn get_image_data(&self) -> Option<Vec<u8>> {
fn get_image_data(&self) -> Option<Snapshot> {
self.base.get_image_data()
}

View file

@ -34,6 +34,7 @@ use pixels::{self, PixelFormat};
use script_layout_interface::HTMLCanvasDataSource;
use serde::{Deserialize, Serialize};
use servo_config::pref;
use snapshot::Snapshot;
use webrender_api::ImageKey;
use crate::canvas_context::CanvasContext;
@ -628,11 +629,15 @@ impl WebGLRenderingContext {
if !canvas.origin_is_clean() {
return Err(Error::Security);
}
if let Some((data, size)) = canvas.fetch_all_data() {
let data = data.unwrap_or_else(|| {
IpcSharedMemory::from_bytes(&vec![0; size.area() as usize * 4])
});
TexPixels::new(data, size, PixelFormat::BGRA8, true)
if let Some(snapshot) = canvas.get_image_data() {
let snapshot = snapshot.as_ipc();
let size = snapshot.size().cast();
let format = match snapshot.format() {
snapshot::PixelFormat::RGBA => PixelFormat::RGBA8,
snapshot::PixelFormat::BGRA => PixelFormat::BGRA8,
};
let premultiply = snapshot.alpha_mode().is_premultiplied();
TexPixels::new(snapshot.to_ipc_shared_memory(), size, format, premultiply)
} else {
return Ok(None);
}
@ -1922,18 +1927,13 @@ impl CanvasContext for WebGLRenderingContext {
}
}
fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory> {
// TODO: add a method in WebGLRenderingContext to get the pixels.
None
}
// Used by HTMLCanvasElement.toDataURL
//
// This emits errors quite liberally, but the spec says that this operation
// can fail and that it is UB what happens in that case.
//
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#2.2
fn get_image_data(&self) -> Option<Vec<u8>> {
fn get_image_data(&self) -> Option<Snapshot> {
handle_potential_webgl_error!(self, self.validate_framebuffer(), return None);
let mut size = self.size().cast();
@ -1945,14 +1945,20 @@ impl CanvasContext for WebGLRenderingContext {
size.width = cmp::min(size.width, fb_width as u32);
size.height = cmp::min(size.height, fb_height as u32);
let (sender, receiver) = ipc::bytes_channel().unwrap();
let (sender, receiver) = ipc::channel().unwrap();
self.send_command(WebGLCommand::ReadPixels(
Rect::from_size(size),
constants::RGBA,
constants::UNSIGNED_BYTE,
sender,
));
Some(receiver.recv().unwrap())
let (data, alpha_mode) = receiver.recv().unwrap();
Some(Snapshot::from_vec(
size.cast(),
snapshot::PixelFormat::RGBA,
alpha_mode,
data.to_vec(),
))
}
fn mark_as_dirty(&self) {
@ -3826,11 +3832,11 @@ impl WebGLRenderingContextMethods<crate::DomTypeHolder> for WebGLRenderingContex
dest_offset += -y * row_len;
}
let (sender, receiver) = ipc::bytes_channel().unwrap();
let (sender, receiver) = ipc::channel().unwrap();
self.send_command(WebGLCommand::ReadPixels(
src_rect, format, pixel_type, sender,
));
let src = receiver.recv().unwrap();
let (src, _) = receiver.recv().unwrap();
let src_row_len = src_rect.size.width as usize * bytes_per_pixel as usize;
for i in 0..src_rect.size.height {

View file

@ -7,8 +7,9 @@ use std::cell::RefCell;
use arrayvec::ArrayVec;
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcSharedMemory};
use ipc_channel::ipc::{self};
use script_layout_interface::HTMLCanvasDataSource;
use snapshot::Snapshot;
use webgpu_traits::{
ContextConfiguration, PRESENTATION_BUFFER_COUNT, WebGPU, WebGPUContextId, WebGPURequest,
WebGPUTexture,
@ -277,10 +278,10 @@ impl CanvasContext for GPUCanvasContext {
}
/// <https://gpuweb.github.io/gpuweb/#ref-for-abstract-opdef-get-a-copy-of-the-image-contents-of-a-context%E2%91%A5>
fn get_image_data_as_shared_memory(&self) -> Option<IpcSharedMemory> {
fn get_image_data(&self) -> Option<Snapshot> {
// 1. Return a copy of the image contents of context.
Some(if self.drawing_buffer.borrow().cleared {
IpcSharedMemory::from_byte(0, self.size().area() as usize * 4)
Snapshot::cleared(self.size())
} else {
let (sender, receiver) = ipc::channel().unwrap();
self.channel
@ -290,7 +291,7 @@ impl CanvasContext for GPUCanvasContext {
sender,
})
.unwrap();
receiver.recv().unwrap()
receiver.recv().unwrap().to_owned()
})
}