Implement GPUSwapChain and GPUCanvasContext and interface with Webrender

Kunal Mohan 2020-06-04 19:28:25 +05:30
parent 73760ea594
commit 71401e0855
28 changed files with 882 additions and 91 deletions

Cargo.lock (generated)

@ -832,6 +832,7 @@ dependencies = [
"style_traits",
"webgpu",
"webrender_api",
"webrender_traits",
"webxr-api",
]
@ -6389,12 +6390,15 @@ name = "webgpu"
version = "0.0.1"
dependencies = [
"arrayvec 0.5.1",
"euclid",
"ipc-channel",
"log",
"malloc_size_of",
"serde",
"servo_config",
"smallvec 0.6.13",
"webrender_api",
"webrender_traits",
"wgpu-core",
"wgpu-types",
]


@ -22,7 +22,9 @@ use surfman_chains::SwapChains;
use surfman_chains_api::SwapChainAPI;
use surfman_chains_api::SwapChainsAPI;
use webrender_surfman::WebrenderSurfman;
use webrender_traits::{WebrenderExternalImageApi, WebrenderExternalImageRegistry};
use webrender_traits::{
WebrenderExternalImageApi, WebrenderExternalImageRegistry, WebrenderImageSource,
};
use webxr_api::SwapChainId as WebXRSwapChainId;
pub struct WebGLComm {
@ -138,9 +140,10 @@ impl WebGLExternalImages {
}
impl WebrenderExternalImageApi for WebGLExternalImages {
fn lock(&mut self, id: u64) -> (u32, Size2D<i32>) {
fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>) {
let id = WebGLContextId(id);
self.lock_swap_chain(id).unwrap_or_default()
let (texture_id, size) = self.lock_swap_chain(id).unwrap_or_default();
(WebrenderImageSource::TextureHandle(texture_id), size)
}
fn unlock(&mut self, id: u64) {


@ -43,6 +43,7 @@ servo_url = { path = "../url" }
style_traits = { path = "../style_traits" }
webgpu = { path = "../webgpu" }
webrender_api = { git = "https://github.com/servo/webrender" }
webrender_traits = { path = "../webrender_traits" }
webxr-api = { git = "https://github.com/servo/webxr", features = ["ipc"] }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "ios"), not(target_os="android"), not(target_arch="arm"), not(target_arch="aarch64")))'.dependencies]


@ -172,11 +172,12 @@ use std::marker::PhantomData;
use std::mem::replace;
use std::process;
use std::rc::{Rc, Weak};
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use std::thread;
use style_traits::viewport::ViewportConstraints;
use style_traits::CSSPixel;
use webgpu::{WebGPU, WebGPURequest};
use webgpu::{self, WebGPU, WebGPURequest};
use webrender_traits::WebrenderExternalImageRegistry;
type PendingApprovalNavigations = HashMap<PipelineId, (LoadData, HistoryEntryReplacement)>;
@ -216,6 +217,18 @@ struct MessagePortInfo {
entangled_with: Option<MessagePortId>,
}
/// Webrender related objects required by WebGPU threads
struct WebrenderWGPU {
/// Webrender API.
webrender_api: webrender_api::RenderApi,
/// List of Webrender external images
webrender_external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
/// WebGPU data that is supplied to Webrender for rendering
wgpu_image_map: Arc<Mutex<HashMap<u64, webgpu::PresentationData>>>,
}
/// Servo supports tabs (referred to as browsers), so `Constellation` needs to
/// store browser specific data for bookkeeping.
struct Browser {
@ -384,6 +397,9 @@ pub struct Constellation<Message, LTF, STF, SWF> {
/// A single WebRender document the constellation operates on.
webrender_document: webrender_api::DocumentId,
/// Webrender related objects required by WebGPU threads
webrender_wgpu: WebrenderWGPU,
/// A channel for content processes to send messages that will
/// be relayed to the WebRender thread.
webrender_api_ipc_sender: script_traits::WebrenderIpcSender,
@ -533,6 +549,12 @@ pub struct InitialConstellationState {
/// Webrender document ID.
pub webrender_document: webrender_api::DocumentId,
/// Webrender API.
pub webrender_api_sender: webrender_api::RenderApiSender,
/// Webrender external images
pub webrender_external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
/// Entry point to create and get channels to a WebGLThread.
pub webgl_threads: Option<WebGLThreads>,
@ -549,6 +571,8 @@ pub struct InitialConstellationState {
/// User agent string to report in network requests.
pub user_agent: Cow<'static, str>,
pub wgpu_image_map: Arc<Mutex<HashMap<u64, webgpu::PresentationData>>>,
}
/// Data needed for webdriver
@ -834,6 +858,12 @@ where
}),
);
let webrender_wgpu = WebrenderWGPU {
webrender_api: state.webrender_api_sender.create_api(),
webrender_external_images: state.webrender_external_images,
wgpu_image_map: state.wgpu_image_map,
};
let mut constellation: Constellation<Message, LTF, STF, SWF> = Constellation {
namespace_receiver,
namespace_sender,
@ -889,6 +919,7 @@ where
webrender_image_api_sender: net_traits::WebrenderIpcSender::new(
webrender_image_ipc_sender,
),
webrender_wgpu,
shutting_down: false,
handled_warnings: VecDeque::new(),
random_pipeline_closure: random_pipeline_closure_probability.map(|prob| {
@ -1899,12 +1930,16 @@ where
EmbedderMsg::MediaSessionEvent(event),
));
},
FromScriptMsg::RequestAdapter(sender, options, ids) => self
.handle_request_wgpu_adapter(
source_pipeline_id,
BrowsingContextId::from(source_top_ctx_id),
FromScriptMsg::RequestAdapter(sender, options, ids),
),
FromScriptMsg::RequestAdapter(sender, options, ids) => self.handle_wgpu_request(
source_pipeline_id,
BrowsingContextId::from(source_top_ctx_id),
FromScriptMsg::RequestAdapter(sender, options, ids),
),
FromScriptMsg::GetWebGPUChan(sender) => self.handle_wgpu_request(
source_pipeline_id,
BrowsingContextId::from(source_top_ctx_id),
FromScriptMsg::GetWebGPUChan(sender),
),
}
}
@ -2072,7 +2107,7 @@ where
}
}
fn handle_request_wgpu_adapter(
fn handle_wgpu_request(
&mut self,
source_pipeline_id: PipelineId,
browsing_context_id: BrowsingContextId,
@ -2090,46 +2125,62 @@ where
Some(host) => host,
None => return warn!("Invalid host url"),
};
match self
let browsing_context_group = if let Some(bcg) = self
.browsing_context_group_set
.get_mut(&browsing_context_group_id)
{
Some(browsing_context_group) => {
let adapter_request =
if let FromScriptMsg::RequestAdapter(sender, options, ids) = request {
WebGPURequest::RequestAdapter {
sender,
options,
ids,
}
} else {
return warn!("Wrong message type in handle_request_wgpu_adapter");
};
let send = match browsing_context_group.webgpus.entry(host) {
Entry::Vacant(v) => v
.insert(match WebGPU::new() {
Some(webgpu) => {
let msg = ConstellationControlMsg::SetWebGPUPort(webgpu.1);
if let Err(e) = source_pipeline.event_loop.send(msg) {
warn!(
"Failed to send SetWebGPUPort to pipeline {} ({:?})",
source_pipeline_id, e
);
}
webgpu.0
},
None => return warn!("Failed to create new WebGPU thread"),
})
.0
.send(adapter_request),
Entry::Occupied(o) => o.get().0.send(adapter_request),
bcg
} else {
return warn!("Browsing context group not found");
};
let webgpu_chan = match browsing_context_group.webgpus.entry(host) {
Entry::Vacant(v) => v
.insert(
match WebGPU::new(
self.webrender_wgpu.webrender_api.create_sender(),
self.webrender_document,
self.webrender_wgpu.webrender_external_images.clone(),
self.webrender_wgpu.wgpu_image_map.clone(),
) {
Some(webgpu) => {
let msg = ConstellationControlMsg::SetWebGPUPort(webgpu.1);
if let Err(e) = source_pipeline.event_loop.send(msg) {
warn!(
"Failed to send SetWebGPUPort to pipeline {} ({:?})",
source_pipeline_id, e
);
}
webgpu.0
},
None => {
return warn!("Failed to create new WebGPU thread");
},
},
)
.clone(),
Entry::Occupied(o) => o.get().clone(),
};
match request {
FromScriptMsg::RequestAdapter(sender, options, ids) => {
let adapter_request = WebGPURequest::RequestAdapter {
sender,
options,
ids,
};
if send.is_err() {
if webgpu_chan.0.send(adapter_request).is_err() {
return warn!("Failed to send request adapter message on WebGPU channel");
}
},
None => return warn!("Browsing context group not found"),
};
FromScriptMsg::GetWebGPUChan(sender) => {
if sender.send(webgpu_chan).is_err() {
return warn!(
"Failed to send WebGPU channel to Pipeline {:?}",
source_pipeline_id
);
}
},
_ => return warn!("Wrong message type in handle_wgpu_request"),
}
}
fn handle_request_from_layout(&mut self, message: FromLayoutMsg) {

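The constellation change above folds the existing RequestAdapter handling and the new GetWebGPUChan message into a single handle_wgpu_request that lazily creates one WebGPU instance per host and caches it in the browsing context group. A rough standalone sketch of that caching pattern, using a hypothetical WebGPUChannel stand-in (the real code passes the WebRender sender, document, external-image registry and image map into WebGPU::new, which can also fail):

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;

// Hypothetical stand-in for the per-host WebGPU channel cached in each
// browsing context group (the real code stores webgpu::WebGPU senders).
#[derive(Clone, Debug, PartialEq)]
struct WebGPUChannel(String);

fn get_or_create_channel(
    webgpus: &mut HashMap<String, WebGPUChannel>,
    host: &str,
) -> WebGPUChannel {
    match webgpus.entry(host.to_owned()) {
        // First request from this host: create a channel and cache it.
        Entry::Vacant(v) => v
            .insert(WebGPUChannel(format!("channel for {}", host)))
            .clone(),
        // Later requests (RequestAdapter, GetWebGPUChan) reuse the cached one.
        Entry::Occupied(o) => o.get().clone(),
    }
}

fn main() {
    let mut webgpus = HashMap::new();
    let a = get_or_create_channel(&mut webgpus, "example.org");
    let b = get_or_create_channel(&mut webgpus, "example.org");
    assert_eq!(a, b); // one WebGPU instance per host, shared by both request kinds
}
```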

@ -1905,6 +1905,7 @@ impl Fragment {
let image_key = match canvas_fragment_info.source {
CanvasFragmentSource::WebGL(image_key) => image_key,
CanvasFragmentSource::WebGPU(image_key) => image_key,
CanvasFragmentSource::Image(ref ipc_renderer) => match *ipc_renderer {
Some(ref ipc_renderer) => {
let ipc_renderer = ipc_renderer.lock().unwrap();


@ -338,6 +338,7 @@ impl InlineAbsoluteFragmentInfo {
pub enum CanvasFragmentSource {
WebGL(webrender_api::ImageKey),
Image(Option<Arc<Mutex<IpcSender<CanvasMsg>>>>),
WebGPU(webrender_api::ImageKey),
}
#[derive(Clone)]
@ -355,6 +356,7 @@ impl CanvasFragmentInfo {
HTMLCanvasDataSource::Image(ipc_sender) => CanvasFragmentSource::Image(
ipc_sender.map(|renderer| Arc::new(Mutex::new(renderer))),
),
HTMLCanvasDataSource::WebGPU(image_key) => CanvasFragmentSource::WebGPU(image_key),
};
CanvasFragmentInfo {


@ -20,7 +20,9 @@ use crate::media_thread::GLPlayerThread;
use euclid::default::Size2D;
use servo_media::player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
use std::sync::{Arc, Mutex};
use webrender_traits::{WebrenderExternalImageApi, WebrenderExternalImageRegistry};
use webrender_traits::{
WebrenderExternalImageApi, WebrenderExternalImageRegistry, WebrenderImageSource,
};
/// These are the messages that the GLPlayer thread will forward to
/// the video player which lives in htmlmediaelement
@ -146,7 +148,7 @@ impl GLPlayerExternalImages {
}
impl WebrenderExternalImageApi for GLPlayerExternalImages {
fn lock(&mut self, id: u64) -> (u32, Size2D<i32>) {
fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>) {
// The GLPlayerMsgForward::Lock message inserts a fence in the
// GLPlayer command queue.
self.glplayer_channel
@ -160,7 +162,7 @@ impl WebrenderExternalImageApi for GLPlayerExternalImages {
// internal OpenGL subsystem.
//self.webrender_gl
// .wait_sync(gl_sync as gl::GLsync, 0, gl::TIMEOUT_IGNORED);
(image_id, size)
(WebrenderImageSource::TextureHandle(image_id), size)
}
fn unlock(&mut self, id: u64) {


@ -37,6 +37,7 @@ use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::bindings::utils::WindowProxyHandler;
use crate::dom::gpubuffer::GPUBufferState;
use crate::dom::gpucanvascontext::WebGPUContextId;
use crate::dom::gpucommandencoder::GPUCommandEncoderState;
use crate::dom::htmlimageelement::SourceSet;
use crate::dom::htmlmediaelement::{HTMLMediaElementFetchContext, MediaFrameRenderer};
@ -168,7 +169,7 @@ use webgpu::{
WebGPUPipelineLayout, WebGPUQueue, WebGPURenderPipeline, WebGPUSampler, WebGPUShaderModule,
WebGPUTexture, WebGPUTextureView,
};
use webrender_api::{DocumentId, ImageKey};
use webrender_api::{DocumentId, ExternalImageId, ImageKey};
use webxr_api::SwapChainId as WebXRSwapChainId;
use webxr_api::{Finger, Hand, Ray, View};
@ -541,6 +542,7 @@ unsafe_no_jsmanaged_fields!(PathBuf);
unsafe_no_jsmanaged_fields!(DrawAPaintImageResult);
unsafe_no_jsmanaged_fields!(DocumentId);
unsafe_no_jsmanaged_fields!(ImageKey);
unsafe_no_jsmanaged_fields!(ExternalImageId);
unsafe_no_jsmanaged_fields!(WebGLBufferId);
unsafe_no_jsmanaged_fields!(WebGLChan);
unsafe_no_jsmanaged_fields!(WebGLFramebufferId);
@ -572,6 +574,7 @@ unsafe_no_jsmanaged_fields!(WebGPUShaderModule);
unsafe_no_jsmanaged_fields!(WebGPUSampler);
unsafe_no_jsmanaged_fields!(WebGPUTexture);
unsafe_no_jsmanaged_fields!(WebGPUTextureView);
unsafe_no_jsmanaged_fields!(WebGPUContextId);
unsafe_no_jsmanaged_fields!(WebGPUCommandBuffer);
unsafe_no_jsmanaged_fields!(WebGPUCommandEncoder);
unsafe_no_jsmanaged_fields!(WebGPUDevice);


@ -53,6 +53,7 @@ use crate::dom::event::{Event, EventBubbles, EventCancelable, EventDefault, Even
use crate::dom::eventtarget::EventTarget;
use crate::dom::focusevent::FocusEvent;
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpucanvascontext::{GPUCanvasContext, WebGPUContextId};
use crate::dom::hashchangeevent::HashChangeEvent;
use crate::dom::htmlanchorelement::HTMLAnchorElement;
use crate::dom::htmlareaelement::HTMLAreaElement;
@ -377,6 +378,8 @@ pub struct Document {
media_controls: DomRefCell<HashMap<String, Dom<ShadowRoot>>>,
/// List of all WebGL context IDs that need flushing.
dirty_webgl_contexts: DomRefCell<HashMap<WebGLContextId, Dom<WebGLRenderingContext>>>,
/// List of all WebGPU context IDs that need flushing.
dirty_webgpu_contexts: DomRefCell<HashMap<WebGPUContextId, Dom<GPUCanvasContext>>>,
/// https://html.spec.whatwg.org/multipage/#concept-document-csp-list
#[ignore_malloc_size_of = "Defined in rust-content-security-policy"]
csp_list: DomRefCell<Option<CspList>>,
@ -2643,14 +2646,14 @@ impl Document {
}
}
pub fn add_dirty_canvas(&self, context: &WebGLRenderingContext) {
pub fn add_dirty_webgl_canvas(&self, context: &WebGLRenderingContext) {
self.dirty_webgl_contexts
.borrow_mut()
.entry(context.context_id())
.or_insert_with(|| Dom::from_ref(context));
}
pub fn flush_dirty_canvases(&self) {
pub fn flush_dirty_webgl_canvases(&self) {
let dirty_context_ids: Vec<_> = self
.dirty_webgl_contexts
.borrow_mut()
@ -2678,6 +2681,21 @@ impl Document {
receiver.recv().unwrap();
}
pub fn add_dirty_webgpu_canvas(&self, context: &GPUCanvasContext) {
self.dirty_webgpu_contexts
.borrow_mut()
.entry(context.context_id())
.or_insert_with(|| Dom::from_ref(context));
}
#[allow(unrooted_must_root)]
pub fn flush_dirty_webgpu_canvases(&self) {
self.dirty_webgpu_contexts
.borrow_mut()
.drain()
.for_each(|(_, context)| context.send_swap_chain_present());
}
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:supported-property-names
// (This takes the filter as a method so the window named getter can use it too)
pub fn supported_property_names_impl(
@ -3039,6 +3057,7 @@ impl Document {
shadow_roots_styles_changed: Cell::new(false),
media_controls: DomRefCell::new(HashMap::new()),
dirty_webgl_contexts: DomRefCell::new(HashMap::new()),
dirty_webgpu_contexts: DomRefCell::new(HashMap::new()),
csp_list: DomRefCell::new(None),
selection: MutNullableDom::new(None),
animation_timeline: if pref!(layout.animations.test.enabled) {


@ -278,6 +278,7 @@ pub struct GlobalScope {
/// An optional string allowing the user agent to be set for testing.
user_agent: Cow<'static, str>,
/// Identity Manager for WebGPU resources
#[ignore_malloc_size_of = "defined in wgpu"]
gpu_id_hub: Arc<Mutex<Identities>>,


@ -0,0 +1,204 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::GPUCanvasContextBinding::{
GPUCanvasContextMethods, GPUSwapChainDescriptor,
};
use crate::dom::bindings::codegen::Bindings::GPUDeviceBinding::GPUDeviceBinding::GPUDeviceMethods;
use crate::dom::bindings::codegen::Bindings::GPUObjectBaseBinding::GPUObjectDescriptorBase;
use crate::dom::bindings::codegen::Bindings::GPUTextureBinding::{
GPUExtent3D, GPUExtent3DDict, GPUTextureDescriptor, GPUTextureDimension, GPUTextureFormat,
};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot, LayoutDom};
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpuswapchain::GPUSwapChain;
use crate::dom::htmlcanvaselement::{HTMLCanvasElement, LayoutCanvasRenderingContextHelpers};
use crate::dom::node::{document_from_node, Node, NodeDamage};
use dom_struct::dom_struct;
use euclid::default::Size2D;
use ipc_channel::ipc;
use script_layout_interface::HTMLCanvasDataSource;
use std::cell::Cell;
use webgpu::{wgt, WebGPU, WebGPURequest};
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct WebGPUContextId(pub u64);
#[dom_struct]
pub struct GPUCanvasContext {
reflector_: Reflector,
#[ignore_malloc_size_of = "channels are hard"]
channel: WebGPU,
canvas: Dom<HTMLCanvasElement>,
size: Cell<Size2D<u32>>,
swap_chain: DomRefCell<Option<Dom<GPUSwapChain>>>,
#[ignore_malloc_size_of = "Defined in webrender"]
webrender_image: Cell<Option<webrender_api::ImageKey>>,
context_id: WebGPUContextId,
}
impl GPUCanvasContext {
fn new_inherited(canvas: &HTMLCanvasElement, size: Size2D<u32>, channel: WebGPU) -> Self {
let (sender, receiver) = ipc::channel().unwrap();
let _ = channel.0.send(WebGPURequest::CreateContext(sender));
let external_id = receiver.recv().unwrap();
Self {
reflector_: Reflector::new(),
channel,
canvas: Dom::from_ref(canvas),
size: Cell::new(size),
swap_chain: DomRefCell::new(None),
webrender_image: Cell::new(None),
context_id: WebGPUContextId(external_id.0),
}
}
pub fn new(
global: &GlobalScope,
canvas: &HTMLCanvasElement,
size: Size2D<u32>,
channel: WebGPU,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(GPUCanvasContext::new_inherited(canvas, size, channel)),
global,
)
}
}
impl GPUCanvasContext {
fn layout_handle(&self) -> HTMLCanvasDataSource {
let image_key = if self.webrender_image.get().is_some() {
self.webrender_image.get().unwrap()
} else {
webrender_api::ImageKey::DUMMY
};
HTMLCanvasDataSource::WebGPU(image_key)
}
pub fn send_swap_chain_present(&self) {
let texture_id = self.swap_chain.borrow().as_ref().unwrap().texture_id().0;
let encoder_id = self
.global()
.wgpu_id_hub()
.lock()
.create_command_encoder_id(texture_id.backend());
if let Err(e) = self.channel.0.send(WebGPURequest::SwapChainPresent {
external_id: self.context_id.0,
texture_id,
encoder_id,
image_key: self.webrender_image.get().unwrap(),
}) {
warn!(
"Failed to send UpdateWebrenderData({:?}) ({})",
self.context_id, e
);
}
}
pub fn context_id(&self) -> WebGPUContextId {
self.context_id
}
pub fn mark_as_dirty(&self) {
self.canvas
.upcast::<Node>()
.dirty(NodeDamage::OtherNodeDamage);
let document = document_from_node(&*self.canvas);
document.add_dirty_webgpu_canvas(self);
}
}
impl LayoutCanvasRenderingContextHelpers for LayoutDom<'_, GPUCanvasContext> {
#[allow(unsafe_code)]
unsafe fn canvas_data_source(self) -> HTMLCanvasDataSource {
(*self.unsafe_get()).layout_handle()
}
}
impl GPUCanvasContextMethods for GPUCanvasContext {
/// https://gpuweb.github.io/gpuweb/#dom-gpucanvascontext-configureswapchain
fn ConfigureSwapChain(&self, descriptor: &GPUSwapChainDescriptor) -> DomRoot<GPUSwapChain> {
if let Some(chain) = &*self.swap_chain.borrow() {
chain.destroy(self.context_id.0, self.webrender_image.get().unwrap());
self.webrender_image.set(None);
}
*self.swap_chain.borrow_mut() = None;
let buffer_id = self
.global()
.wgpu_id_hub()
.lock()
.create_buffer_id(descriptor.device.id().0.backend());
let image_desc = webrender_api::ImageDescriptor {
format: match descriptor.format {
GPUTextureFormat::Rgba8unorm => webrender_api::ImageFormat::RGBA8,
GPUTextureFormat::Bgra8unorm => webrender_api::ImageFormat::BGRA8,
_ => panic!("SwapChain format({:?}) not supported", descriptor.format),
},
size: webrender_api::units::DeviceIntSize::new(
self.size.get().width as i32,
self.size.get().height as i32,
),
stride: Some(
(((self.size.get().width * 4) | (wgt::COPY_BYTES_PER_ROW_ALIGNMENT - 1)) + 1)
as i32,
),
offset: 0,
flags: webrender_api::ImageDescriptorFlags::from_bits(1).unwrap(),
};
let image_data = webrender_api::ImageData::External(webrender_api::ExternalImageData {
id: webrender_api::ExternalImageId(self.context_id.0),
channel_index: 0,
image_type: webrender_api::ExternalImageType::Buffer,
});
let (sender, receiver) = ipc::channel().unwrap();
self.channel
.0
.send(WebGPURequest::CreateSwapChain {
device_id: descriptor.device.id().0,
buffer_id,
external_id: self.context_id.0,
sender,
image_desc,
image_data,
})
.expect("Failed to create WebGPU SwapChain");
let usage = if descriptor.usage % 2 == 0 {
descriptor.usage + 1
} else {
descriptor.usage
};
let text_desc = GPUTextureDescriptor {
parent: GPUObjectDescriptorBase { label: None },
dimension: GPUTextureDimension::_2d,
format: descriptor.format,
mipLevelCount: 1,
sampleCount: 1,
usage,
size: GPUExtent3D::GPUExtent3DDict(GPUExtent3DDict {
width: self.size.get().width,
height: self.size.get().height,
depth: 1,
}),
};
let texture = descriptor.device.CreateTexture(&text_desc);
self.webrender_image.set(Some(receiver.recv().unwrap()));
let swap_chain = GPUSwapChain::new(&self.global(), self.channel.clone(), &self, &*texture);
*self.swap_chain.borrow_mut() = Some(Dom::from_ref(&*swap_chain));
swap_chain
}
}
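Two bit tricks in ConfigureSwapChain above are easy to misread: the image stride is rounded up to wgpu's copy-row alignment, and the requested texture usage is forced to include the low bit so the swap-chain texture can later be copied into the presentation buffer. A minimal standalone sketch of both, with the wgpu constants spelled out locally (values assumed from wgpu 0.5):

```rust
// Stand-ins for the wgpu constants referenced above (values as of wgpu 0.5;
// treat them as assumptions of this sketch).
const COPY_BYTES_PER_ROW_ALIGNMENT: u32 = 256;
const TEXTURE_USAGE_COPY_SRC: u32 = 0x01;

/// Same expression as the stride computation in the diff: OR-ing in
/// (alignment - 1) and adding 1 rounds `width * 4` up to a multiple of the
/// alignment (adding one extra block when the row is already aligned).
fn buffer_stride(width: u32) -> u32 {
    ((width * 4) | (COPY_BYTES_PER_ROW_ALIGNMENT - 1)) + 1
}

/// Same effect as the `usage % 2 == 0 { usage + 1 }` branch above: force the
/// low bit (COPY_SRC) so the texture can be copied into the readback buffer
/// at present time.
fn ensure_copy_src(usage: u32) -> u32 {
    usage | TEXTURE_USAGE_COPY_SRC
}

fn main() {
    assert_eq!(buffer_stride(100), 512); // 400-byte rows are padded to 512
    assert_eq!(buffer_stride(64), 512);  // 256-byte rows (already aligned) gain a block
    assert_eq!(ensure_copy_src(0x10), 0x11); // OUTPUT_ATTACHMENT | COPY_SRC
    assert_eq!(ensure_copy_src(0x11), 0x11); // already set, unchanged
}
```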


@ -118,6 +118,10 @@ impl GPUDevice {
}
impl GPUDevice {
pub fn id(&self) -> webgpu::WebGPUDevice {
self.device
}
fn validate_buffer_descriptor(
&self,
descriptor: &GPUBufferDescriptor,


@ -0,0 +1,85 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::GPUSwapChainBinding::GPUSwapChainMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpucanvascontext::GPUCanvasContext;
use crate::dom::gputexture::GPUTexture;
use dom_struct::dom_struct;
use webgpu::{WebGPU, WebGPURequest, WebGPUTexture};
#[dom_struct]
pub struct GPUSwapChain {
reflector_: Reflector,
#[ignore_malloc_size_of = "channels are hard"]
channel: WebGPU,
label: DomRefCell<Option<DOMString>>,
context: Dom<GPUCanvasContext>,
texture: Dom<GPUTexture>,
}
impl GPUSwapChain {
fn new_inherited(channel: WebGPU, context: &GPUCanvasContext, texture: &GPUTexture) -> Self {
Self {
reflector_: Reflector::new(),
channel,
context: Dom::from_ref(context),
texture: Dom::from_ref(texture),
label: DomRefCell::new(None),
}
}
pub fn new(
global: &GlobalScope,
channel: WebGPU,
context: &GPUCanvasContext,
texture: &GPUTexture,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(GPUSwapChain::new_inherited(channel, context, texture)),
global,
)
}
}
impl GPUSwapChain {
pub fn destroy(&self, external_id: u64, image_key: webrender_api::ImageKey) {
if let Err(e) = self.channel.0.send(WebGPURequest::DestroySwapChain {
external_id,
image_key,
}) {
warn!(
"Failed to send DestroySwapChain-ImageKey({:?}) ({})",
image_key, e
);
}
}
pub fn texture_id(&self) -> WebGPUTexture {
self.texture.id()
}
}
impl GPUSwapChainMethods for GPUSwapChain {
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn GetLabel(&self) -> Option<DOMString> {
self.label.borrow().clone()
}
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn SetLabel(&self, value: Option<DOMString>) {
*self.label.borrow_mut() = value;
}
/// https://gpuweb.github.io/gpuweb/#dom-gpuswapchain-getcurrenttexture
fn GetCurrentTexture(&self) -> DomRoot<GPUTexture> {
self.context.mark_as_dirty();
//self.context.send_swap_chain_present();
DomRoot::from_ref(&*self.texture)
}
}


@ -103,6 +103,12 @@ impl Drop for GPUTexture {
}
}
impl GPUTexture {
pub fn id(&self) -> WebGPUTexture {
self.texture
}
}
impl GPUTextureMethods for GPUTexture {
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn GetLabel(&self) -> Option<DOMString> {


@ -20,13 +20,12 @@ use crate::dom::canvasrenderingcontext2d::{
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element, LayoutElementHelpers};
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpucanvascontext::GPUCanvasContext;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{window_from_node, Node};
use crate::dom::virtualmethods::VirtualMethods;
use crate::dom::webgl2renderingcontext::WebGL2RenderingContext;
use crate::dom::webglrenderingcontext::{
LayoutCanvasWebGLRenderingContextHelpers, WebGLRenderingContext,
};
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use crate::script_runtime::JSContext;
use base64;
use canvas_traits::canvas::{CanvasId, CanvasMsg, FromScriptMsg};
@ -36,11 +35,12 @@ use euclid::default::{Rect, Size2D};
use html5ever::{LocalName, Prefix};
use image::png::PNGEncoder;
use image::ColorType;
use ipc_channel::ipc::IpcSharedMemory;
use ipc_channel::ipc::{self as ipcchan, IpcSharedMemory};
use js::error::throw_type_error;
use js::rust::HandleValue;
use profile_traits::ipc;
use script_layout_interface::{HTMLCanvasData, HTMLCanvasDataSource};
use script_traits::ScriptMsg;
use style::attr::{AttrValue, LengthOrPercentageOrAuto};
const DEFAULT_WIDTH: u32 = 300;
@ -52,6 +52,7 @@ pub enum CanvasContext {
Context2d(Dom<CanvasRenderingContext2D>),
WebGL(Dom<WebGLRenderingContext>),
WebGL2(Dom<WebGL2RenderingContext>),
WebGPU(Dom<GPUCanvasContext>),
}
#[dom_struct]
@ -95,6 +96,7 @@ impl HTMLCanvasElement {
},
CanvasContext::WebGL(ref context) => context.recreate(size),
CanvasContext::WebGL2(ref context) => context.recreate(size),
CanvasContext::WebGPU(_) => unimplemented!(),
}
}
}
@ -111,6 +113,11 @@ impl HTMLCanvasElement {
}
}
pub trait LayoutCanvasRenderingContextHelpers {
#[allow(unsafe_code)]
unsafe fn canvas_data_source(self) -> HTMLCanvasDataSource;
}
pub trait LayoutHTMLCanvasElementHelpers {
fn data(self) -> HTMLCanvasData;
fn get_width(self) -> LengthOrPercentageOrAuto;
@ -132,6 +139,9 @@ impl LayoutHTMLCanvasElementHelpers for LayoutDom<'_, HTMLCanvasElement> {
Some(&CanvasContext::WebGL2(ref context)) => {
context.to_layout().canvas_data_source()
},
Some(&CanvasContext::WebGPU(ref context)) => {
context.to_layout().canvas_data_source()
},
None => HTMLCanvasDataSource::Image(None),
}
};
@ -239,6 +249,26 @@ impl HTMLCanvasElement {
Some(context)
}
fn get_or_init_webgpu_context(&self) -> Option<DomRoot<GPUCanvasContext>> {
if let Some(ctx) = self.context() {
return match *ctx {
CanvasContext::WebGPU(ref ctx) => Some(DomRoot::from_ref(ctx)),
_ => None,
};
}
let (sender, receiver) = ipcchan::channel().unwrap();
let _ = self
.global()
.script_to_constellation_chan()
.send(ScriptMsg::GetWebGPUChan(sender));
let window = window_from_node(self);
let size = self.get_size();
let channel = receiver.recv().expect("Failed to get WebGPU channel");
let context = GPUCanvasContext::new(window.upcast::<GlobalScope>(), self, size, channel);
*self.context.borrow_mut() = Some(CanvasContext::WebGPU(Dom::from_ref(&*context)));
Some(context)
}
/// Gets the base WebGLRenderingContext for WebGL or WebGL 2, if exists.
pub fn get_base_webgl_context(&self) -> Option<DomRoot<WebGLRenderingContext>> {
match *self.context.borrow() {
@ -296,6 +326,10 @@ impl HTMLCanvasElement {
// TODO: add a method in WebGL2RenderingContext to get the pixels.
return None;
},
Some(&CanvasContext::WebGPU(_)) => {
// TODO: add a method in GPUCanvasContext to get the pixels.
return None;
},
None => None,
};
@ -333,6 +367,9 @@ impl HTMLCanvasElementMethods for HTMLCanvasElement {
"webgl2" | "experimental-webgl2" => self
.get_or_init_webgl2_context(cx, options)
.map(RenderingContext::WebGL2RenderingContext),
"gpupresent" => self
.get_or_init_webgpu_context()
.map(RenderingContext::GPUCanvasContext),
_ => None,
}
}
@ -371,6 +408,8 @@ impl HTMLCanvasElementMethods for HTMLCanvasElement {
None => return Ok(USVString("data:,".into())),
}
},
//TODO: Add method get_image_data to GPUCanvasContext
Some(CanvasContext::WebGPU(_)) => return Ok(USVString("data:,".into())),
None => {
// Each pixel is fully-transparent black.
vec![0; (self.Width() * self.Height() * 4) as usize]


@ -325,6 +325,7 @@ pub mod gpubindgroup;
pub mod gpubindgrouplayout;
pub mod gpubuffer;
pub mod gpubufferusage;
pub mod gpucanvascontext;
pub mod gpucolorwrite;
pub mod gpucommandbuffer;
pub mod gpucommandencoder;
@ -338,6 +339,7 @@ pub mod gpurenderpipeline;
pub mod gpusampler;
pub mod gpushadermodule;
pub mod gpushaderstage;
pub mod gpuswapchain;
pub mod gputexture;
pub mod gputextureusage;
pub mod gputextureview;


@ -17,7 +17,7 @@ use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot, LayoutDom, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlcanvaselement::HTMLCanvasElement;
use crate::dom::htmlcanvaselement::{HTMLCanvasElement, LayoutCanvasRenderingContextHelpers};
use crate::dom::webgl_validations::tex_image_2d::{
TexImage2DValidator, TexImage2DValidatorResult, TexStorageValidator, TexStorageValidatorResult,
};
@ -29,8 +29,7 @@ use crate::dom::webglprogram::WebGLProgram;
use crate::dom::webglquery::WebGLQuery;
use crate::dom::webglrenderbuffer::WebGLRenderbuffer;
use crate::dom::webglrenderingcontext::{
uniform_get, uniform_typed, LayoutCanvasWebGLRenderingContextHelpers, Operation, TexPixels,
VertexAttrib, WebGLRenderingContext,
uniform_get, uniform_typed, Operation, TexPixels, VertexAttrib, WebGLRenderingContext,
};
use crate::dom::webglsampler::{WebGLSampler, WebGLSamplerValue};
use crate::dom::webglshader::WebGLShader;
@ -4381,7 +4380,7 @@ impl WebGL2RenderingContextMethods for WebGL2RenderingContext {
}
}
impl LayoutCanvasWebGLRenderingContextHelpers for LayoutDom<'_, WebGL2RenderingContext> {
impl LayoutCanvasRenderingContextHelpers for LayoutDom<'_, WebGL2RenderingContext> {
#[allow(unsafe_code)]
unsafe fn canvas_data_source(self) -> HTMLCanvasDataSource {
let this = &*self.unsafe_get();


@ -23,7 +23,7 @@ use crate::dom::bindings::str::DOMString;
use crate::dom::element::cors_setting_for_element;
use crate::dom::event::{Event, EventBubbles, EventCancelable};
use crate::dom::htmlcanvaselement::utils as canvas_utils;
use crate::dom::htmlcanvaselement::HTMLCanvasElement;
use crate::dom::htmlcanvaselement::{HTMLCanvasElement, LayoutCanvasRenderingContextHelpers};
use crate::dom::htmliframeelement::HTMLIFrameElement;
use crate::dom::node::{document_from_node, window_from_node, Node, NodeDamage};
use crate::dom::promise::Promise;
@ -539,7 +539,7 @@ impl WebGLRenderingContext {
.dirty(NodeDamage::OtherNodeDamage);
let document = document_from_node(&*self.canvas);
document.add_dirty_canvas(self);
document.add_dirty_webgl_canvas(self);
}
fn vertex_attrib(&self, indx: u32, x: f32, y: f32, z: f32, w: f32) {
@ -4697,12 +4697,7 @@ impl WebGLRenderingContextMethods for WebGLRenderingContext {
}
}
pub trait LayoutCanvasWebGLRenderingContextHelpers {
#[allow(unsafe_code)]
unsafe fn canvas_data_source(self) -> HTMLCanvasDataSource;
}
impl LayoutCanvasWebGLRenderingContextHelpers for LayoutDom<'_, WebGLRenderingContext> {
impl LayoutCanvasRenderingContextHelpers for LayoutDom<'_, WebGLRenderingContext> {
#[allow(unsafe_code)]
unsafe fn canvas_data_source(self) -> HTMLCanvasDataSource {
(*self.unsafe_get()).layout_handle()


@ -0,0 +1,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://gpuweb.github.io/gpuweb/#gpucanvascontext
[Exposed=(Window, DedicatedWorker), Pref="dom.webgpu.enabled"]
interface GPUCanvasContext {
GPUSwapChain configureSwapChain(GPUSwapChainDescriptor descriptor);
//Promise<GPUTextureFormat> getSwapChainPreferredFormat(GPUDevice device);
};
dictionary GPUSwapChainDescriptor : GPUObjectDescriptorBase {
required GPUDevice device;
required GPUTextureFormat format;
GPUTextureUsageFlags usage = 0x10; // GPUTextureUsage.OUTPUT_ATTACHMENT
};


@ -0,0 +1,10 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://gpuweb.github.io/gpuweb/#gpuswapchain
[Exposed=(Window, DedicatedWorker), Pref="dom.webgpu.enabled"]
interface GPUSwapChain {
GPUTexture getCurrentTexture();
};
GPUSwapChain includes GPUObjectBase;


@ -3,7 +3,10 @@
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://html.spec.whatwg.org/multipage/#htmlcanvaselement
typedef (CanvasRenderingContext2D or WebGLRenderingContext or WebGL2RenderingContext) RenderingContext;
typedef (CanvasRenderingContext2D
or WebGLRenderingContext
or WebGL2RenderingContext
or GPUCanvasContext) RenderingContext;
[Exposed=Window]
interface HTMLCanvasElement : HTMLElement {


@ -1665,7 +1665,8 @@ impl Window {
// If this reflow is for display, ensure webgl canvases are composited with
// up-to-date contents.
if for_display {
document.flush_dirty_canvases();
document.flush_dirty_webgpu_canvases();
document.flush_dirty_webgl_canvases();
}
let pending_restyles = document.drain_pending_restyles();


@ -123,6 +123,7 @@ pub enum LayoutElementType {
pub enum HTMLCanvasDataSource {
WebGL(webrender_api::ImageKey),
Image(Option<IpcSender<CanvasMsg>>),
WebGPU(webrender_api::ImageKey),
}
pub struct HTMLCanvasData {


@ -38,7 +38,7 @@ use std::collections::{HashMap, VecDeque};
use std::fmt;
use style_traits::viewport::ViewportConstraints;
use style_traits::CSSPixel;
use webgpu::{wgpu, WebGPUResponseResult};
use webgpu::{wgpu, WebGPU, WebGPUResponseResult};
use webrender_api::units::{DeviceIntPoint, DeviceIntSize};
/// A particular iframe's size, associated with a browsing context.
@ -280,6 +280,8 @@ pub enum ScriptMsg {
wgpu::instance::RequestAdapterOptions,
SmallVec<[wgpu::id::AdapterId; 4]>,
),
/// Get WebGPU channel
GetWebGPUChan(IpcSender<WebGPU>),
}
impl fmt::Debug for ScriptMsg {
@ -338,6 +340,7 @@ impl fmt::Debug for ScriptMsg {
GetScreenAvailSize(..) => "GetScreenAvailSize",
MediaSessionEvent(..) => "MediaSessionEvent",
RequestAdapter(..) => "RequestAdapter",
GetWebGPUChan(..) => "GetWebGPUChan",
};
write!(formatter, "ScriptMsg::{}", variant)
}


@ -115,6 +115,7 @@ use servo_media::player::context::GlContext;
use servo_media::ServoMedia;
use std::borrow::Cow;
use std::cmp::max;
use std::collections::HashMap;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
@ -476,13 +477,21 @@ where
let glplayer_threads = match window.get_gl_context() {
GlContext::Unknown => None,
_ => {
let (glplayer_threads, image_handler) = GLPlayerThreads::new(external_images);
let (glplayer_threads, image_handler) =
GLPlayerThreads::new(external_images.clone());
external_image_handlers
.set_handler(image_handler, WebrenderImageHandlerType::Media);
Some(glplayer_threads)
},
};
let wgpu_image_handler = webgpu::WGPUExternalImages::new();
let wgpu_image_map = wgpu_image_handler.images.clone();
external_image_handlers.set_handler(
Box::new(wgpu_image_handler),
WebrenderImageHandlerType::WebGPU,
);
let player_context = WindowGLContext {
gl_context: window.get_gl_context(),
native_display: window.get_native_display(),
@ -514,12 +523,15 @@ where
debugger_chan,
devtools_chan,
webrender_document,
webrender_api_sender,
webxr_main_thread.registry(),
player_context,
webgl_threads,
glplayer_threads,
event_loop_waker,
window_size,
external_images,
wgpu_image_map,
);
if cfg!(feature = "webdriver") {
@ -851,12 +863,15 @@ fn create_constellation(
debugger_chan: Option<debugger::Sender>,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
webrender_document: webrender_api::DocumentId,
webrender_api_sender: webrender_api::RenderApiSender,
webxr_registry: webxr_api::Registry,
player_context: WindowGLContext,
webgl_threads: Option<WebGLThreads>,
glplayer_threads: Option<GLPlayerThreads>,
event_loop_waker: Option<Box<dyn EventLoopWaker>>,
initial_window_size: WindowSizeData,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
wgpu_image_map: Arc<Mutex<HashMap<u64, webgpu::PresentationData>>>,
) -> Sender<ConstellationMsg> {
// Global configuration options, parsed from the command line.
let opts = opts::get();
@ -896,12 +911,15 @@ fn create_constellation(
time_profiler_chan,
mem_profiler_chan,
webrender_document,
webrender_api_sender,
webxr_registry,
webgl_threads,
glplayer_threads,
player_context,
event_loop_waker,
user_agent,
webrender_external_images: external_images,
wgpu_image_map,
};
let constellation_chan = Constellation::<


@ -12,11 +12,14 @@ path = "lib.rs"
[dependencies]
arrayvec = { version = "0.5.1", features = ["serde"] }
euclid = "0.20"
ipc-channel = "0.14"
log = "0.4"
malloc_size_of = { path = "../malloc_size_of" }
serde = { version = "1.0", features = ["serde_derive"] }
servo_config = { path = "../config" }
smallvec = { version = "0.6", features = ["serde"] }
webrender_api = { git = "https://github.com/servo/webrender" }
webrender_traits = { path = "../webrender_traits" }
wgpu-core = { version = "0.5.0", git = "https://github.com/gfx-rs/wgpu", features = ["replay", "trace"] }
wgpu-types = { version = "0.5.0", git = "https://github.com/gfx-rs/wgpu", features = ["replay", "trace"] }


@ -11,20 +11,31 @@ pub extern crate wgpu_types as wgt;
pub mod identity;
use arrayvec::ArrayVec;
use euclid::default::Size2D;
use identity::{IdentityRecyclerFactory, WebGPUMsg};
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use serde::{Deserialize, Serialize};
use servo_config::pref;
use smallvec::SmallVec;
use std::collections::HashMap;
use std::ffi::CString;
use std::ptr;
use std::slice;
use std::sync::{Arc, Mutex};
use webrender_traits::{
WebrenderExternalImageApi, WebrenderExternalImageRegistry, WebrenderImageHandlerType,
WebrenderImageSource,
};
use wgpu::{
binding_model::{
BindGroupDescriptor, BindGroupEntry, BindGroupLayoutDescriptor, BindGroupLayoutEntry,
},
command::{BufferCopyView, TextureCopyView},
device::HostMap,
id,
instance::RequestAdapterOptions,
resource::{BufferMapAsyncStatus, BufferMapOperation},
};
#[derive(Debug, Deserialize, Serialize)]
@ -87,6 +98,7 @@ pub enum WebGPURequest {
program_id: id::ShaderModuleId,
entry_point: String,
},
CreateContext(IpcSender<webrender_api::ExternalImageId>),
CreatePipelineLayout {
device_id: id::DeviceId,
pipeline_layout_id: id::PipelineLayoutId,
@ -122,6 +134,14 @@ pub enum WebGPURequest {
program_id: id::ShaderModuleId,
program: Vec<u32>,
},
CreateSwapChain {
device_id: id::DeviceId,
buffer_id: id::BufferId,
external_id: u64,
sender: IpcSender<webrender_api::ImageKey>,
image_desc: webrender_api::ImageDescriptor,
image_data: webrender_api::ImageData,
},
CreateTexture {
device_id: id::DeviceId,
texture_id: id::TextureId,
@ -133,6 +153,10 @@ pub enum WebGPURequest {
descriptor: wgt::TextureViewDescriptor<String>,
},
DestroyBuffer(id::BufferId),
DestroySwapChain {
external_id: u64,
image_key: webrender_api::ImageKey,
},
DestroyTexture(id::TextureId),
Exit(IpcSender<()>),
RequestAdapter {
@ -158,6 +182,12 @@ pub enum WebGPURequest {
queue_id: id::QueueId,
command_buffers: Vec<id::CommandBufferId>,
},
SwapChainPresent {
external_id: u64,
texture_id: id::TextureId,
encoder_id: id::CommandEncoderId,
image_key: webrender_api::ImageKey,
},
UnmapBuffer {
device_id: id::DeviceId,
buffer_id: id::BufferId,
@ -169,7 +199,12 @@ pub enum WebGPURequest {
pub struct WebGPU(pub IpcSender<WebGPURequest>);
impl WebGPU {
pub fn new() -> Option<(Self, IpcReceiver<WebGPUMsg>)> {
pub fn new(
webrender_api_sender: webrender_api::RenderApiSender,
webrender_document: webrender_api::DocumentId,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
wgpu_image_map: Arc<Mutex<HashMap<u64, PresentationData>>>,
) -> Option<(Self, IpcReceiver<WebGPUMsg>)> {
if !pref!(dom.webgpu.enabled) {
return None;
}
@ -199,7 +234,16 @@ impl WebGPU {
if let Err(e) = std::thread::Builder::new()
.name("WGPU".to_owned())
.spawn(move || {
WGPU::new(receiver, sender_clone, script_sender).run();
WGPU::new(
receiver,
sender_clone,
script_sender,
webrender_api_sender,
webrender_document,
external_images,
wgpu_image_map,
)
.run();
})
{
warn!("Failed to spwan WGPU thread ({})", e);
@ -224,6 +268,10 @@ struct WGPU {
devices: Vec<WebGPUDevice>,
// Track invalid adapters https://gpuweb.github.io/gpuweb/#invalid
_invalid_adapters: Vec<WebGPUAdapter>,
webrender_api: webrender_api::RenderApi,
webrender_document: webrender_api::DocumentId,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
wgpu_image_map: Arc<Mutex<HashMap<u64, PresentationData>>>,
}
impl WGPU {
@ -231,6 +279,10 @@ impl WGPU {
receiver: IpcReceiver<WebGPURequest>,
sender: IpcSender<WebGPURequest>,
script_sender: IpcSender<WebGPUMsg>,
webrender_api_sender: webrender_api::RenderApiSender,
webrender_document: webrender_api::DocumentId,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
wgpu_image_map: Arc<Mutex<HashMap<u64, PresentationData>>>,
) -> Self {
let factory = IdentityRecyclerFactory {
sender: script_sender.clone(),
@ -243,6 +295,10 @@ impl WGPU {
adapters: Vec::new(),
devices: Vec::new(),
_invalid_adapters: Vec::new(),
webrender_api: webrender_api_sender.create_api(),
webrender_document,
external_images,
wgpu_image_map,
}
}
@ -322,6 +378,16 @@ impl WGPU {
let _ = gfx_select!(command_encoder_id =>
global.device_create_command_encoder(device_id, &Default::default(), command_encoder_id));
},
WebGPURequest::CreateContext(sender) => {
let id = self
.external_images
.lock()
.expect("Lock poisoned?")
.next_id(WebrenderImageHandlerType::WebGPU);
if let Err(e) = sender.send(id) {
warn!("Failed to send ExternalImageId to new context ({})", e);
};
},
WebGPURequest::CreateComputePipeline {
device_id,
compute_pipeline_id,
@ -374,9 +440,10 @@ impl WGPU {
} => {
let global = &self.global;
let vertex_ep = std::ffi::CString::new(vertex_entry_point).unwrap();
let frag_ep;
let frag_stage = match fragment_module {
Some(frag) => {
let frag_ep =
frag_ep =
std::ffi::CString::new(fragment_entry_point.unwrap()).unwrap();
let frag_module = wgpu_core::pipeline::ProgrammableStageDescriptor {
module: frag,
@ -450,6 +517,55 @@ impl WGPU {
let _ = gfx_select!(program_id =>
global.device_create_shader_module(device_id, &descriptor, program_id));
},
WebGPURequest::CreateSwapChain {
device_id,
buffer_id,
external_id,
sender,
image_desc,
image_data,
} => {
let height = image_desc.size.height;
let width = image_desc.size.width;
let buffer_stride =
((width * 4) as u32 | (wgt::COPY_BYTES_PER_ROW_ALIGNMENT - 1)) + 1;
let _ = self.wgpu_image_map.lock().unwrap().insert(
external_id,
PresentationData {
device_id,
queue_id: device_id,
data: vec![255; (buffer_stride * height as u32) as usize],
size: Size2D::new(width, height),
buffer_id,
buffer_stride,
image_desc,
image_data: image_data.clone(),
},
);
let buffer_size = (buffer_stride * height as u32) as wgt::BufferAddress;
let global = &self.global;
let buffer_desc = wgt::BufferDescriptor {
label: ptr::null(),
size: buffer_size,
usage: wgt::BufferUsage::MAP_READ | wgt::BufferUsage::COPY_DST,
mapped_at_creation: false,
};
let _ = gfx_select!(buffer_id => global.device_create_buffer(
device_id,
&buffer_desc,
buffer_id
));
let image_key = self.webrender_api.generate_image_key();
if let Err(e) = sender.send(image_key) {
warn!("Failed to send ImageKey ({})", e);
}
let mut txn = webrender_api::Transaction::new();
txn.add_image(image_key, image_desc, image_data, None);
self.webrender_api
.send_transaction(self.webrender_document, txn);
},
WebGPURequest::CreateTexture {
device_id,
texture_id,
@ -477,6 +593,23 @@ impl WGPU {
let global = &self.global;
gfx_select!(buffer => global.buffer_destroy(buffer));
},
WebGPURequest::DestroySwapChain {
external_id,
image_key,
} => {
let data = self
.wgpu_image_map
.lock()
.unwrap()
.remove(&external_id)
.unwrap();
let global = &self.global;
gfx_select!(data.buffer_id => global.buffer_destroy(data.buffer_id));
let mut txn = webrender_api::Transaction::new();
txn.delete_image(image_key);
self.webrender_api
.send_transaction(self.webrender_document, txn);
},
WebGPURequest::DestroyTexture(texture) => {
let global = &self.global;
gfx_select!(texture => global.texture_destroy(texture));
@ -588,6 +721,110 @@ impl WGPU {
&command_buffers
));
},
WebGPURequest::SwapChainPresent {
external_id,
texture_id,
encoder_id,
image_key,
} => {
let global = &self.global;
let device_id;
let queue_id;
let size;
let buffer_id;
let buffer_stride;
{
if let Some(present_data) =
self.wgpu_image_map.lock().unwrap().get_mut(&external_id)
{
size = present_data.size;
device_id = present_data.device_id;
queue_id = present_data.queue_id;
buffer_id = present_data.buffer_id;
buffer_stride = present_data.buffer_stride;
} else {
warn!("Data not found for ExternalImageId({:?})", external_id);
continue;
}
}
let buffer_size = (size.height as u32 * buffer_stride) as wgt::BufferAddress;
let _ = gfx_select!(encoder_id => global.device_create_command_encoder(
device_id,
&wgt::CommandEncoderDescriptor::default(),
encoder_id
));
let buffer_cv = BufferCopyView {
buffer: buffer_id,
layout: wgt::TextureDataLayout {
offset: 0,
bytes_per_row: buffer_stride,
rows_per_image: 0,
},
};
let texture_cv = TextureCopyView {
texture: texture_id,
mip_level: 0,
origin: wgt::Origin3d::ZERO,
};
let copy_size = wgt::Extent3d {
width: size.width as u32,
height: size.height as u32,
depth: 1,
};
gfx_select!(encoder_id => global.command_encoder_copy_texture_to_buffer(
encoder_id,
&texture_cv,
&buffer_cv,
&copy_size
));
let _ = gfx_select!(encoder_id => global.command_encoder_finish(
encoder_id,
&wgt::CommandBufferDescriptor::default()
));
gfx_select!(queue_id => global.queue_submit(
queue_id,
&[encoder_id]
));
extern "C" fn callback(status: BufferMapAsyncStatus, _user_data: *mut u8) {
match status {
BufferMapAsyncStatus::Success => {
debug!("Buffer Mapped");
},
_ => warn!("Could not map buffer"),
}
}
let map_op = BufferMapOperation {
host: HostMap::Read,
callback,
user_data: ptr::null_mut(),
};
gfx_select!(buffer_id => global.buffer_map_async(buffer_id, 0..buffer_size, map_op));
// TODO: Remove the blocking behaviour
gfx_select!(device_id => global.device_poll(device_id, true));
let buf_data = gfx_select!(buffer_id =>
global.buffer_get_mapped_range(buffer_id, 0, wgt::BufferSize::WHOLE));
if let Some(present_data) =
self.wgpu_image_map.lock().unwrap().get_mut(&external_id)
{
present_data.data = unsafe {
slice::from_raw_parts(buf_data, buffer_size as usize).to_vec()
};
let mut txn = webrender_api::Transaction::new();
txn.update_image(
image_key,
present_data.image_desc,
present_data.image_data.clone(),
&webrender_api::DirtyRect::All,
);
self.webrender_api
.send_transaction(self.webrender_document, txn);
} else {
warn!("Data not found for ExternalImageId({:?})", external_id);
}
gfx_select!(buffer_id => global.buffer_unmap(buffer_id));
},
WebGPURequest::UnmapBuffer {
device_id,
buffer_id,
@ -635,5 +872,54 @@ webgpu_resource!(WebGPUQueue, id::QueueId);
webgpu_resource!(WebGPURenderPipeline, id::RenderPipelineId);
webgpu_resource!(WebGPUSampler, id::SamplerId);
webgpu_resource!(WebGPUShaderModule, id::ShaderModuleId);
webgpu_resource!(WebGPUSwapChain, id::SwapChainId);
webgpu_resource!(WebGPUTexture, id::TextureId);
webgpu_resource!(WebGPUTextureView, id::TextureViewId);
pub struct WGPUExternalImages {
pub images: Arc<Mutex<HashMap<u64, PresentationData>>>,
pub locked_ids: HashMap<u64, Vec<u8>>,
}
impl WGPUExternalImages {
pub fn new() -> Self {
Self {
images: Arc::new(Mutex::new(HashMap::new())),
locked_ids: HashMap::new(),
}
}
}
impl WebrenderExternalImageApi for WGPUExternalImages {
fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>) {
let size;
let data;
if let Some(present_data) = self.images.lock().unwrap().get(&id) {
size = present_data.size;
data = present_data.data.clone();
} else {
size = Size2D::new(0, 0);
data = Vec::new();
}
let _ = self.locked_ids.insert(id, data);
(
WebrenderImageSource::Raw(self.locked_ids.get(&id).unwrap().as_slice()),
size,
)
}
fn unlock(&mut self, id: u64) {
let _ = self.locked_ids.remove(&id);
}
}
pub struct PresentationData {
device_id: id::DeviceId,
queue_id: id::QueueId,
pub data: Vec<u8>,
pub size: Size2D<i32>,
buffer_id: id::BufferId,
buffer_stride: u32,
image_desc: webrender_api::ImageDescriptor,
image_data: webrender_api::ImageData,
}


@ -18,18 +18,24 @@ use webrender_api::units::TexelRect;
/// This trait is used to notify lock/unlock messages and get the
/// required info that WR needs.
pub trait WebrenderExternalImageApi {
fn lock(&mut self, id: u64) -> (u32, Size2D<i32>);
fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>);
fn unlock(&mut self, id: u64);
}
pub enum WebrenderImageSource<'a> {
TextureHandle(u32),
Raw(&'a [u8]),
}
/// Type of Webrender External Image Handler.
pub enum WebrenderImageHandlerType {
WebGL,
Media,
WebGPU,
}
/// List of Webrender external images to be shared among all external image
/// consumers (WebGL, Media).
/// consumers (WebGL, Media, WebGPU).
/// It ensures that external image identifiers are unique.
pub struct WebrenderExternalImageRegistry {
/// Map of all generated external images.
@ -71,6 +77,8 @@ pub struct WebrenderExternalImageHandlers {
webgl_handler: Option<Box<dyn WebrenderExternalImageApi>>,
/// Media player handler.
media_handler: Option<Box<dyn WebrenderExternalImageApi>>,
/// WebGPU handler.
webgpu_handler: Option<Box<dyn WebrenderExternalImageApi>>,
/// Webrender external images.
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
}
@ -82,6 +90,7 @@ impl WebrenderExternalImageHandlers {
Self {
webgl_handler: None,
media_handler: None,
webgpu_handler: None,
external_images: external_images.clone(),
},
external_images,
@ -96,6 +105,7 @@ impl WebrenderExternalImageHandlers {
match handler_type {
WebrenderImageHandlerType::WebGL => self.webgl_handler = Some(handler),
WebrenderImageHandlerType::Media => self.media_handler = Some(handler),
WebrenderImageHandlerType::WebGPU => self.webgpu_handler = Some(handler),
}
}
}
@ -115,25 +125,40 @@ impl webrender_api::ExternalImageHandler for WebrenderExternalImageHandlers {
let handler_type = external_images
.get(&key)
.expect("Tried to get unknown external image");
let (texture_id, uv) = match handler_type {
match handler_type {
WebrenderImageHandlerType::WebGL => {
let (texture_id, size) = self.webgl_handler.as_mut().unwrap().lock(key.0);
(
texture_id,
TexelRect::new(0.0, size.height as f32, size.width as f32, 0.0),
)
let (source, size) = self.webgl_handler.as_mut().unwrap().lock(key.0);
let texture_id = match source {
WebrenderImageSource::TextureHandle(b) => b,
_ => panic!("Wrong type"),
};
webrender_api::ExternalImage {
uv: TexelRect::new(0.0, size.height as f32, size.width as f32, 0.0),
source: webrender_api::ExternalImageSource::NativeTexture(texture_id),
}
},
WebrenderImageHandlerType::Media => {
let (texture_id, size) = self.media_handler.as_mut().unwrap().lock(key.0);
(
texture_id,
TexelRect::new(0.0, 0.0, size.width as f32, size.height as f32),
)
let (source, size) = self.media_handler.as_mut().unwrap().lock(key.0);
let texture_id = match source {
WebrenderImageSource::TextureHandle(b) => b,
_ => panic!("Wrong type"),
};
webrender_api::ExternalImage {
uv: TexelRect::new(0.0, size.height as f32, size.width as f32, 0.0),
source: webrender_api::ExternalImageSource::NativeTexture(texture_id),
}
},
WebrenderImageHandlerType::WebGPU => {
let (source, size) = self.webgpu_handler.as_mut().unwrap().lock(key.0);
let buffer = match source {
WebrenderImageSource::Raw(b) => b,
_ => panic!("Wrong type"),
};
webrender_api::ExternalImage {
uv: TexelRect::new(0.0, size.height as f32, size.width as f32, 0.0),
source: webrender_api::ExternalImageSource::RawData(buffer),
}
},
};
webrender_api::ExternalImage {
uv,
source: webrender_api::ExternalImageSource::NativeTexture(texture_id),
}
}
@ -147,6 +172,9 @@ impl webrender_api::ExternalImageHandler for WebrenderExternalImageHandlers {
match handler_type {
WebrenderImageHandlerType::WebGL => self.webgl_handler.as_mut().unwrap().unlock(key.0),
WebrenderImageHandlerType::Media => self.media_handler.as_mut().unwrap().unlock(key.0),
WebrenderImageHandlerType::WebGPU => {
self.webgpu_handler.as_mut().unwrap().unlock(key.0)
},
};
}
}
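With the changes above, lock now hands WebRender either a GL texture handle or a raw byte slice via WebrenderImageSource. A minimal sketch of a handler written against the new signature, with the trait and enum re-declared locally so the example stands alone (the real definitions live in webrender_traits; Size2D is assumed to come from the euclid crate, and RawImages here is a toy type in the spirit of WGPUExternalImages):

```rust
use euclid::default::Size2D;
use std::collections::HashMap;

// Local mirrors of the webrender_traits definitions shown in the diff.
pub enum WebrenderImageSource<'a> {
    TextureHandle(u32),
    Raw(&'a [u8]),
}

pub trait WebrenderExternalImageApi {
    fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>);
    fn unlock(&mut self, id: u64);
}

// Toy handler serving CPU-side pixel buffers: `lock` pins a copy of the bytes
// so the returned slice stays valid until `unlock`.
pub struct RawImages {
    images: HashMap<u64, (Vec<u8>, Size2D<i32>)>,
    locked: HashMap<u64, Vec<u8>>,
}

impl WebrenderExternalImageApi for RawImages {
    fn lock(&mut self, id: u64) -> (WebrenderImageSource, Size2D<i32>) {
        let (data, size) = self
            .images
            .get(&id)
            .cloned()
            .unwrap_or_else(|| (Vec::new(), Size2D::new(0, 0)));
        self.locked.insert(id, data);
        (
            WebrenderImageSource::Raw(self.locked.get(&id).unwrap().as_slice()),
            size,
        )
    }

    fn unlock(&mut self, id: u64) {
        let _ = self.locked.remove(&id);
    }
}

fn main() {
    let mut handler = RawImages {
        images: HashMap::from([(1, (vec![255u8; 4], Size2D::new(1, 1)))]),
        locked: HashMap::new(),
    };
    let (source, size) = handler.lock(1);
    if let WebrenderImageSource::Raw(bytes) = source {
        assert_eq!((bytes.len(), size.width, size.height), (4, 1, 1));
    }
    handler.unlock(1);
}
```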