libservo: Move GL accelerated media setup out of RenderingContext and simplify it (#35553)

This moves the GL accelerated media setup out of `RenderingContext`,
which prevents libservo from depending explicitly on the Wayland and
X11 versions of surfman. This support is experimental and (honestly) a
bit broken. I've confirmed that this works as well as it did before the
change.

The main thing here is that the configuration, which currently needs
surfman types, moves to servoshell. In addition:

1. Instead of passing the information to the Constellation, the setup is
   stored statically (see the sketch after this list). This is necessary
   to avoid introducing a dependency on `media` in `webrender_traits`.
   It's quite likely that `media` types should move to the internal
   embedding API to avoid this; that is left for a follow-up change.
2. The whole system of wrapping the media channels in an abstract type,
   so that they could be either mpsc channels or IPC channels, is
   removed. This was never going to work, because mpsc channels cannot
   be serialized and deserialized with serde, so this now just uses IPC
   channels directly. We also have other ways of doing this kind of
   abstraction in Servo, so we do not need another one; in any case,
   the `mpsc` version was hard-coded to be disabled.
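
A condensed sketch of the static-registration pattern described in point 1, using a simplified stand-in struct rather than the real `WindowGLContext` (which also carries the GL context, API, and native display):

```rust
use std::sync::Mutex;

/// Simplified stand-in for `WindowGLContext`, just to show the pattern.
#[derive(Clone)]
struct WindowGLContext {
    active: bool,
}

/// Process-global context shared between the embedder and the constellation.
/// This is only acceptable because GL contexts cannot cross process boundaries anyway.
static WINDOW_GL_CONTEXT: Mutex<WindowGLContext> = Mutex::new(WindowGLContext { active: false });

impl WindowGLContext {
    /// Called once by the embedder before the constellation starts.
    fn register(context: Self) {
        *WINDOW_GL_CONTEXT.lock().unwrap() = context;
    }

    /// Cloned by any component that needs the context, e.g. per-pipeline setup.
    fn get() -> Self {
        WINDOW_GL_CONTEXT.lock().unwrap().clone()
    }
}

fn main() {
    WindowGLContext::register(WindowGLContext { active: true });
    assert!(WindowGLContext::get().active);
}
```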

Signed-off-by: Martin Robinson <mrobinson@igalia.com>
Martin Robinson 2025-02-20 14:52:18 +01:00 committed by GitHub
parent 9887ad369d
commit 5465bfc2af
15 changed files with 226 additions and 469 deletions

Cargo.lock (generated)
View file

@ -4549,6 +4549,7 @@ dependencies = [
"log",
"serde",
"servo-media",
"servo_config",
"webrender_api",
"webrender_traits",
]

View file

@ -131,7 +131,7 @@ use ipc_channel::router::ROUTER;
use ipc_channel::Error as IpcError;
use keyboard_types::webdriver::Event as WebDriverInputEvent;
use log::{debug, error, info, trace, warn};
use media::{GLPlayerThreads, WindowGLContext};
use media::WindowGLContext;
use net_traits::pub_domains::reg_host;
use net_traits::request::Referrer;
use net_traits::storage_thread::{StorageThreadMsg, StorageType};
@ -465,12 +465,6 @@ pub struct Constellation<STF, SWF> {
/// If True, exits on thread failure instead of displaying about:failure
hard_fail: bool,
/// Entry point to create and get channels to a GLPlayerThread.
glplayer_threads: Option<GLPlayerThreads>,
/// Application window's GL Context for Media player
player_context: WindowGLContext,
/// Pipeline ID of the active media session.
active_media_session: Option<PipelineId>,
@ -528,11 +522,6 @@ pub struct InitialConstellationState {
/// The XR device registry
pub webxr_registry: Option<webxr_api::Registry>,
pub glplayer_threads: Option<GLPlayerThreads>,
/// Application window's GL Context for Media player
pub player_context: WindowGLContext,
/// User agent string to report in network requests.
pub user_agent: Cow<'static, str>,
@ -759,8 +748,6 @@ where
pending_approval_navigations: HashMap::new(),
pressed_mouse_buttons: 0,
hard_fail,
glplayer_threads: state.glplayer_threads,
player_context: state.player_context,
active_media_session: None,
user_agent: state.user_agent,
rippy_data,
@ -1011,7 +998,7 @@ where
.as_ref()
.map(|threads| threads.pipeline()),
webxr_registry: self.webxr_registry.clone(),
player_context: self.player_context.clone(),
player_context: WindowGLContext::get(),
user_agent: self.user_agent.clone(),
rippy_data: self.rippy_data.clone(),
});
@ -2690,11 +2677,7 @@ where
}
debug!("Exiting GLPlayer thread.");
if let Some(glplayer_threads) = self.glplayer_threads.as_ref() {
if let Err(e) = glplayer_threads.exit() {
warn!("Exit GLPlayer Thread failed ({})", e);
}
}
WindowGLContext::get().exit();
debug!("Exiting the system font service thread.");
self.system_font_service.exit();

View file

@ -12,6 +12,7 @@ name = "media"
path = "lib.rs"
[dependencies]
servo_config = { path = "../config" }
euclid = { workspace = true }
fnv = { workspace = true }
ipc-channel = { workspace = true }

View file

@ -5,28 +5,36 @@
#![deny(unsafe_code)]
#![allow(clippy::type_complexity)]
mod media_channel;
mod media_thread;
use std::sync::{Arc, Mutex};
use euclid::default::Size2D;
use ipc_channel::ipc::{channel, IpcReceiver, IpcSender};
use log::warn;
use serde::{Deserialize, Serialize};
use servo_config::pref;
pub use servo_media::player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
use webrender_traits::{
WebrenderExternalImageApi, WebrenderExternalImageRegistry, WebrenderImageSource,
WebrenderExternalImageApi, WebrenderExternalImageHandlers, WebrenderExternalImageRegistry,
WebrenderImageHandlerType, WebrenderImageSource,
};
pub use crate::media_channel::glplayer_channel;
use crate::media_channel::{GLPlayerChan, GLPlayerPipeline, GLPlayerReceiver, GLPlayerSender};
use crate::media_thread::GLPlayerThread;
/// A global version of the [`WindowGLContext`] to be shared between the embedder and the
/// constellation. This is only okay to do because OpenGL contexts cannot be used across processes
/// anyway.
///
/// This avoids having to establish a dependency on `media` in `*_traits` crates.
static WINDOW_GL_CONTEXT: Mutex<WindowGLContext> = Mutex::new(WindowGLContext::inactive());
/// These are the messages that the GLPlayer thread will forward to
/// the video player which lives in htmlmediaelement
#[derive(Debug, Deserialize, Serialize)]
pub enum GLPlayerMsgForward {
PlayerId(u64),
Lock(GLPlayerSender<(u32, Size2D<i32>, usize)>),
Lock(IpcSender<(u32, Size2D<i32>, usize)>),
Unlock(),
}
@ -38,7 +46,7 @@ pub enum GLPlayerMsgForward {
#[derive(Debug, Deserialize, Serialize)]
pub enum GLPlayerMsg {
/// Registers an instantiated player in DOM
RegisterPlayer(GLPlayerSender<GLPlayerMsgForward>),
RegisterPlayer(IpcSender<GLPlayerMsgForward>),
/// Unregisters a player's ID
UnregisterPlayer(u64),
/// Locks a specific texture from a player. Lock messages are used
@ -53,7 +61,7 @@ pub enum GLPlayerMsg {
///
/// Currently OpenGL Sync Objects are used to implement the
/// synchronization mechanism.
Lock(u64, GLPlayerSender<(u32, Size2D<i32>, usize)>),
Lock(u64, IpcSender<(u32, Size2D<i32>, usize)>),
/// Unlocks a specific texture from a player. Unlock messages are
/// used for a correct synchronization with WebRender external
/// image API.
@ -67,56 +75,117 @@ pub enum GLPlayerMsg {
Exit,
}
/// A [`PlayerGLContext`] that renders to a window. Note that if the background
/// thread is not started for this context, then it is inactive (returning
/// `Unknown` values in the trait implementation).
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct WindowGLContext {
/// Application's GL Context
pub gl_context: GlContext,
pub context: GlContext,
/// Application's GL Api
pub gl_api: GlApi,
pub api: GlApi,
/// Application's native display
pub native_display: NativeDisplay,
pub display: NativeDisplay,
/// A channel to the GLPlayer thread.
pub glplayer_chan: Option<GLPlayerPipeline>,
pub glplayer_thread_sender: Option<IpcSender<GLPlayerMsg>>,
}
impl WindowGLContext {
/// Create an inactive [`WindowGLContext`].
pub const fn inactive() -> Self {
WindowGLContext {
context: GlContext::Unknown,
api: GlApi::None,
display: NativeDisplay::Unknown,
glplayer_thread_sender: None,
}
}
pub fn register(context: Self) {
*WINDOW_GL_CONTEXT.lock().unwrap() = context;
}
pub fn get() -> Self {
WINDOW_GL_CONTEXT.lock().unwrap().clone()
}
/// Sends an exit message to close the GLPlayerThread.
pub fn exit(&self) {
self.send(GLPlayerMsg::Exit);
}
#[inline]
pub fn send(&self, message: GLPlayerMsg) {
// Don't do anything if GL accelerated playback is disabled.
let Some(sender) = self.glplayer_thread_sender.as_ref() else {
return;
};
if let Err(error) = sender.send(message) {
warn!("Could no longer communicate with GL accelerated media threads: {error}")
}
}
pub fn initialize(display: NativeDisplay, api: GlApi, context: GlContext) {
if matches!(display, NativeDisplay::Unknown) || matches!(context, GlContext::Unknown) {
return;
}
let mut window_gl_context = WINDOW_GL_CONTEXT.lock().unwrap();
if window_gl_context.glplayer_thread_sender.is_some() {
warn!("Not going to initialize GL accelerated media playback more than once.");
return;
}
window_gl_context.context = context;
window_gl_context.display = display;
window_gl_context.api = api;
}
pub fn initialize_image_handler(
external_image_handlers: &mut WebrenderExternalImageHandlers,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
) {
if !pref!(media_glvideo_enabled) {
return;
}
let mut window_gl_context = WINDOW_GL_CONTEXT.lock().unwrap();
if window_gl_context.glplayer_thread_sender.is_some() {
warn!("Not going to initialize GL accelerated media playback more than once.");
return;
}
if matches!(window_gl_context.display, NativeDisplay::Unknown) ||
matches!(window_gl_context.context, GlContext::Unknown)
{
return;
}
let thread_sender = GLPlayerThread::start(external_images);
let image_handler = Box::new(GLPlayerExternalImages::new(thread_sender.clone()));
external_image_handlers.set_handler(image_handler, WebrenderImageHandlerType::Media);
window_gl_context.glplayer_thread_sender = Some(thread_sender);
}
}
impl PlayerGLContext for WindowGLContext {
fn get_gl_context(&self) -> GlContext {
self.gl_context.clone()
match self.glplayer_thread_sender {
Some(..) => self.context.clone(),
None => GlContext::Unknown,
}
}
fn get_native_display(&self) -> NativeDisplay {
self.native_display.clone()
match self.glplayer_thread_sender {
Some(..) => self.display.clone(),
None => NativeDisplay::Unknown,
}
}
fn get_gl_api(&self) -> GlApi {
self.gl_api.clone()
}
}
/// GLPlayer Threading API entry point that lives in the constellation.
pub struct GLPlayerThreads(GLPlayerSender<GLPlayerMsg>);
impl GLPlayerThreads {
pub fn new(
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
) -> (GLPlayerThreads, Box<dyn WebrenderExternalImageApi>) {
let channel = GLPlayerThread::start(external_images);
let external = GLPlayerExternalImages::new(channel.clone());
(GLPlayerThreads(channel), Box::new(external))
}
/// Gets the GLPlayerThread handle for each script pipeline.
pub fn pipeline(&self) -> GLPlayerPipeline {
// This mode creates a single thread, so the existing
// GLPlayerChan is just cloned.
GLPlayerPipeline(GLPlayerChan(self.0.clone()))
}
/// Sends an exit message to close the GLPlayerThreads
pub fn exit(&self) -> Result<(), &'static str> {
self.0
.send(GLPlayerMsg::Exit)
.map_err(|_| "Failed to send Exit message")
self.api.clone()
}
}
@ -126,20 +195,20 @@ struct GLPlayerExternalImages {
// @FIXME(victor): this should be added when GstGLSyncMeta is
// added
//webrender_gl: Rc<dyn gl::Gl>,
glplayer_channel: GLPlayerSender<GLPlayerMsg>,
glplayer_channel: IpcSender<GLPlayerMsg>,
// Used to avoid creating a new channel on each received WebRender
// request.
lock_channel: (
GLPlayerSender<(u32, Size2D<i32>, usize)>,
GLPlayerReceiver<(u32, Size2D<i32>, usize)>,
IpcSender<(u32, Size2D<i32>, usize)>,
IpcReceiver<(u32, Size2D<i32>, usize)>,
),
}
impl GLPlayerExternalImages {
fn new(channel: GLPlayerSender<GLPlayerMsg>) -> Self {
fn new(sender: IpcSender<GLPlayerMsg>) -> Self {
Self {
glplayer_channel: channel,
lock_channel: glplayer_channel().unwrap(),
glplayer_channel: sender,
lock_channel: channel().unwrap(),
}
}
}

View file

@ -1,15 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::io;
use serde::{Deserialize, Serialize};
pub type GLPlayerSender<T> = ipc_channel::ipc::IpcSender<T>;
pub type GLPlayerReceiver<T> = ipc_channel::ipc::IpcReceiver<T>;
pub fn glplayer_channel<T: Serialize + for<'de> Deserialize<'de>>(
) -> Result<(GLPlayerSender<T>, GLPlayerReceiver<T>), io::Error> {
ipc_channel::ipc::channel()
}

View file

@ -1,113 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Enum wrappers to be able to select different channel implementations at runtime.
mod ipc;
mod mpsc;
use std::fmt;
use serde::{Deserialize, Serialize};
use crate::GLPlayerMsg;
#[derive(Deserialize, Serialize)]
pub enum GLPlayerSender<T: Serialize> {
Ipc(ipc::GLPlayerSender<T>),
Mpsc(mpsc::GLPlayerSender<T>),
}
impl<T> Clone for GLPlayerSender<T>
where
T: Serialize,
{
fn clone(&self) -> Self {
match *self {
GLPlayerSender::Ipc(ref chan) => GLPlayerSender::Ipc(chan.clone()),
GLPlayerSender::Mpsc(ref chan) => GLPlayerSender::Mpsc(chan.clone()),
}
}
}
impl<T: Serialize> fmt::Debug for GLPlayerSender<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "GLPlayerSender(..)")
}
}
impl<T: Serialize> GLPlayerSender<T> {
#[inline]
pub fn send(&self, msg: T) -> GLPlayerSendResult {
match *self {
GLPlayerSender::Ipc(ref sender) => sender.send(msg).map_err(|_| ()),
GLPlayerSender::Mpsc(ref sender) => sender.send(msg).map_err(|_| ()),
}
}
}
pub type GLPlayerSendResult = Result<(), ()>;
pub enum GLPlayerReceiver<T>
where
T: for<'de> Deserialize<'de> + Serialize,
{
Ipc(ipc::GLPlayerReceiver<T>),
Mpsc(mpsc::GLPlayerReceiver<T>),
}
impl<T> GLPlayerReceiver<T>
where
T: for<'de> Deserialize<'de> + Serialize,
{
pub fn recv(&self) -> Result<T, ()> {
match *self {
GLPlayerReceiver::Ipc(ref receiver) => receiver.recv().map_err(|_| ()),
GLPlayerReceiver::Mpsc(ref receiver) => receiver.recv().map_err(|_| ()),
}
}
#[allow(clippy::wrong_self_convention)] // It is an alias to the underlying module
pub fn to_ipc_receiver(self) -> ipc_channel::ipc::IpcReceiver<T> {
match self {
GLPlayerReceiver::Ipc(receiver) => receiver,
_ => unreachable!(),
}
}
}
pub fn glplayer_channel<T>() -> Option<(GLPlayerSender<T>, GLPlayerReceiver<T>)>
where
T: for<'de> Deserialize<'de> + Serialize,
{
// Let's use Ipc until we move the Player instance into GPlayerThread
if true {
ipc::glplayer_channel()
.map(|(tx, rx)| (GLPlayerSender::Ipc(tx), GLPlayerReceiver::Ipc(rx)))
.ok()
} else {
mpsc::glplayer_channel()
.map(|(tx, rx)| (GLPlayerSender::Mpsc(tx), GLPlayerReceiver::Mpsc(rx)))
.ok()
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct GLPlayerChan(pub GLPlayerSender<GLPlayerMsg>);
impl GLPlayerChan {
#[inline]
pub fn send(&self, msg: GLPlayerMsg) -> GLPlayerSendResult {
self.0.send(msg)
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct GLPlayerPipeline(pub GLPlayerChan);
impl GLPlayerPipeline {
pub fn channel(&self) -> GLPlayerChan {
self.0.clone()
}
}

View file

@ -1,57 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::sync::mpsc;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
macro_rules! unreachable_serializable {
($name:ident) => {
impl<T> Serialize for $name<T> {
fn serialize<S: Serializer>(&self, _: S) -> Result<S::Ok, S::Error> {
unreachable!();
}
}
impl<'a, T> Deserialize<'a> for $name<T> {
fn deserialize<D>(_: D) -> Result<$name<T>, D::Error>
where
D: Deserializer<'a>,
{
unreachable!();
}
}
};
}
pub struct GLPlayerSender<T>(mpsc::Sender<T>);
pub struct GLPlayerReceiver<T>(mpsc::Receiver<T>);
impl<T> Clone for GLPlayerSender<T> {
fn clone(&self) -> Self {
GLPlayerSender(self.0.clone())
}
}
impl<T> GLPlayerSender<T> {
#[inline]
pub fn send(&self, data: T) -> Result<(), mpsc::SendError<T>> {
self.0.send(data)
}
}
impl<T> GLPlayerReceiver<T> {
#[inline]
pub fn recv(&self) -> Result<T, mpsc::RecvError> {
self.0.recv()
}
}
pub fn glplayer_channel<T>() -> Result<(GLPlayerSender<T>, GLPlayerReceiver<T>), ()> {
let (sender, receiver) = mpsc::channel();
Ok((GLPlayerSender(sender), GLPlayerReceiver(receiver)))
}
unreachable_serializable!(GLPlayerReceiver);
unreachable_serializable!(GLPlayerSender);

View file

@ -6,11 +6,11 @@ use std::sync::{Arc, Mutex};
use std::thread;
use fnv::FnvHashMap;
use ipc_channel::ipc::{channel, IpcSender};
use log::{trace, warn};
use webrender_api::ExternalImageId;
use webrender_traits::{WebrenderExternalImageRegistry, WebrenderImageHandlerType};
use crate::media_channel::{glplayer_channel, GLPlayerSender};
/// GL player threading API entry point that lives in the
/// constellation.
use crate::{GLPlayerMsg, GLPlayerMsgForward};
@ -19,7 +19,7 @@ use crate::{GLPlayerMsg, GLPlayerMsgForward};
/// a set of video players with GL render.
pub struct GLPlayerThread {
/// Map of live players.
players: FnvHashMap<u64, GLPlayerSender<GLPlayerMsgForward>>,
players: FnvHashMap<u64, IpcSender<GLPlayerMsgForward>>,
/// List of registered webrender external images.
/// We use it to get an unique ID for new players.
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
@ -35,8 +35,8 @@ impl GLPlayerThread {
pub fn start(
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
) -> GLPlayerSender<GLPlayerMsg> {
let (sender, receiver) = glplayer_channel::<GLPlayerMsg>().unwrap();
) -> IpcSender<GLPlayerMsg> {
let (sender, receiver) = channel().unwrap();
thread::Builder::new()
.name("GLPlayer".to_owned())
.spawn(move || {

View file

@ -17,10 +17,10 @@ use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{local_name, namespace_url, ns, LocalName, Prefix};
use http::header::{self, HeaderMap, HeaderValue};
use http::StatusCode;
use ipc_channel::ipc::{self, IpcSharedMemory};
use ipc_channel::ipc::{self, channel, IpcSharedMemory};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use media::{glplayer_channel, GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
FetchMetadata, FetchResponseListener, Metadata, NetworkError, ResourceFetchTiming,
@ -1399,12 +1399,10 @@ impl HTMLMediaElement {
// GLPlayer thread setup
let (player_id, image_receiver) = window
.get_player_context()
.glplayer_chan
.glplayer_thread_sender
.map(|pipeline| {
let (image_sender, image_receiver) =
glplayer_channel::<GLPlayerMsgForward>().unwrap();
let (image_sender, image_receiver) = channel().unwrap();
pipeline
.channel()
.send(GLPlayerMsg::RegisterPlayer(image_sender))
.unwrap();
match image_receiver.recv().unwrap() {
@ -1425,7 +1423,7 @@ impl HTMLMediaElement {
.media_element_task_source()
.to_sendable();
ROUTER.add_typed_route(
image_receiver.to_ipc_receiver(),
image_receiver,
Box::new(move |message| {
let msg = message.unwrap();
let this = trusted_node.clone();
@ -2051,14 +2049,8 @@ impl HTMLMediaElement {
impl Drop for HTMLMediaElement {
fn drop(&mut self) {
if let Some(ref pipeline) = self.player_context.glplayer_chan {
if let Err(err) = pipeline
.channel()
.send(GLPlayerMsg::UnregisterPlayer(self.id.get()))
{
warn!("GLPlayer disappeared!: {:?}", err);
}
}
self.player_context
.send(GLPlayerMsg::UnregisterPlayer(self.id.get()));
}
}

View file

@ -99,6 +99,7 @@ servo_geometry = { path = "../geometry" }
servo_url = { path = "../url" }
style = { workspace = true }
style_traits = { workspace = true }
surfman = { workspace = true }
tracing = { workspace = true, optional = true }
url = { workspace = true }
webdriver_server = { path = "../webdriver_server", optional = true }
@ -109,12 +110,10 @@ webrender_traits = { workspace = true }
webxr-api = { workspace = true, optional = true }
[target.'cfg(any(target_os = "android", target_env = "ohos"))'.dependencies]
surfman = { workspace = true, features = ["sm-angle-default"] }
webxr = { path = "../webxr", optional = true }
[target.'cfg(not(any(target_os = "android", target_env = "ohos")))'.dependencies]
arboard = { workspace = true, optional = true }
surfman = { workspace = true, features = ["sm-x11", "sm-raw-window-handle-06"] }
webxr = { path = "../webxr", features = ["ipc", "glwindow", "headless"] }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "ios"), not(target_os = "android"), not(target_env = "ohos"), not(target_arch = "arm"), not(target_arch = "aarch64")))'.dependencies]

View file

@ -80,7 +80,7 @@ pub use keyboard_types::*;
#[cfg(feature = "layout_2013")]
pub use layout_thread_2013;
use log::{warn, Log, Metadata, Record};
use media::{GLPlayerThreads, GlApi, NativeDisplay, WindowGLContext};
use media::{GlApi, NativeDisplay, WindowGLContext};
use net::protocols::ProtocolRegistry;
use net::resource_thread::new_resource_threads;
use profile::{mem as profile_mem, time as profile_time};
@ -101,7 +101,6 @@ pub use webgpu;
use webgpu::swapchain::WGPUImageMap;
use webrender::{RenderApiSender, ShaderPrecacheFlags, UploadMethod, ONE_TIME_USAGE_HINT};
use webrender_api::{ColorF, DocumentId, FramePublishId};
use webrender_traits::rendering_context::GLVersion;
pub use webrender_traits::rendering_context::{
OffscreenRenderingContext, RenderingContext, SoftwareRenderingContext, SurfmanRenderingContext,
WindowRenderingContext,
@ -462,10 +461,9 @@ impl Servo {
WebrenderImageHandlerType::WebGPU,
);
let (player_context, glplayer_threads) = Self::create_media_window_gl_context(
WindowGLContext::initialize_image_handler(
&mut external_image_handlers,
external_images.clone(),
&rendering_context,
);
webrender.set_external_image_handler(external_image_handlers);
@ -494,9 +492,7 @@ impl Servo {
webrender_api_sender,
#[cfg(feature = "webxr")]
webxr_main_thread.registry(),
player_context,
Some(webgl_threads),
glplayer_threads,
window_size,
external_images,
#[cfg(feature = "webgpu")]
@ -558,66 +554,10 @@ impl Servo {
*self.delegate.borrow_mut() = delegate;
}
fn create_media_window_gl_context(
external_image_handlers: &mut WebrenderExternalImageHandlers,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
rendering_context: &Rc<dyn RenderingContext>,
) -> (WindowGLContext, Option<GLPlayerThreads>) {
if !pref!(media_glvideo_enabled) {
return (
WindowGLContext {
gl_context: GlContext::Unknown,
gl_api: GlApi::None,
native_display: NativeDisplay::Unknown,
glplayer_chan: None,
},
None,
);
}
let native_display = rendering_context.gl_display();
let gl_context = rendering_context.gl_context();
if let (NativeDisplay::Unknown, GlContext::Unknown) = (&native_display, &gl_context) {
return (
WindowGLContext {
gl_context: GlContext::Unknown,
gl_api: GlApi::None,
native_display: NativeDisplay::Unknown,
glplayer_chan: None,
},
None,
);
}
let gl_api = match rendering_context.gl_version() {
GLVersion::GL(major, minor) => {
if major >= 3 && minor >= 2 {
GlApi::OpenGL3
} else {
GlApi::OpenGL
}
},
GLVersion::GLES(major, _) => {
if major > 1 {
GlApi::Gles2
} else {
GlApi::Gles1
}
},
};
assert!(!matches!(gl_context, GlContext::Unknown));
let (glplayer_threads, image_handler) = GLPlayerThreads::new(external_images.clone());
external_image_handlers.set_handler(image_handler, WebrenderImageHandlerType::Media);
(
WindowGLContext {
gl_context,
native_display,
gl_api,
glplayer_chan: Some(GLPlayerThreads::pipeline(&glplayer_threads)),
},
Some(glplayer_threads),
)
/// **EXPERIMENTAL:** Initialize GL accelerated media playback. This currently only works on a limited number
/// of platforms. This should be run *before* calling [`Servo::new`] and creating the first [`WebView`].
pub fn initialize_gl_accelerated_media(display: NativeDisplay, api: GlApi, context: GlContext) {
WindowGLContext::initialize(display, api, context)
}
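
For illustration, a minimal sketch (not part of this change) of how an embedder might call this new entry point, assuming it already has raw EGL handles; servoshell's real version derives them from surfman in the new module further down:

```rust
use servo::media::{GlApi, GlContext, NativeDisplay};
use servo::Servo;

/// Hypothetical embedder helper wrapping the new API. Must run before
/// `Servo::new` and before the first `WebView` is created.
fn enable_gl_accelerated_media(egl_display: usize, egl_context: usize) {
    Servo::initialize_gl_accelerated_media(
        NativeDisplay::Egl(egl_display),
        GlApi::Gles2, // assumption: the embedder created a GLES 2 context
        GlContext::Egl(egl_context),
    );
}
```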
/// Spin the Servo event loop, which:
@ -1096,9 +1036,7 @@ fn create_constellation(
webrender_document: DocumentId,
webrender_api_sender: RenderApiSender,
#[cfg(feature = "webxr")] webxr_registry: webxr_api::Registry,
player_context: WindowGLContext,
webgl_threads: Option<WebGLThreads>,
glplayer_threads: Option<GLPlayerThreads>,
initial_window_size: WindowSizeData,
external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
#[cfg(feature = "webgpu")] wgpu_image_map: WGPUImageMap,
@ -1151,8 +1089,6 @@ fn create_constellation(
#[cfg(not(feature = "webxr"))]
webxr_registry: None,
webgl_threads,
glplayer_threads,
player_context,
user_agent,
webrender_external_images: external_images,
#[cfg(feature = "webgpu")]

View file

@ -4,7 +4,7 @@
#![deny(unsafe_code)]
use std::cell::{Cell, RefCell};
use std::cell::{Cell, RefCell, RefMut};
use std::ffi::c_void;
use std::num::NonZeroU32;
use std::rc::Rc;
@ -17,24 +17,13 @@ use glow::NativeFramebuffer;
use image::RgbaImage;
use log::{debug, trace, warn};
use raw_window_handle::{DisplayHandle, WindowHandle};
use servo_media::player::context::{GlContext, NativeDisplay};
use surfman::chains::{PreserveBuffer, SwapChain};
#[cfg(all(target_os = "linux", not(target_env = "ohos")))]
use surfman::platform::generic::multi::connection::NativeConnection as LinuxNativeConnection;
#[cfg(all(target_os = "linux", not(target_env = "ohos")))]
use surfman::platform::generic::multi::context::NativeContext as LinuxNativeContext;
pub use surfman::Error;
use surfman::{
Adapter, Connection, Context, ContextAttributeFlags, ContextAttributes, Device, GLApi,
NativeContext, NativeWidget, Surface, SurfaceAccess, SurfaceInfo, SurfaceTexture, SurfaceType,
};
/// Describes the OpenGL version that is requested when a context is created.
pub enum GLVersion {
GL(u8, u8),
GLES(u8, u8),
}
/// The `RenderingContext` trait defines a set of methods for managing
/// an OpenGL or GLES rendering context.
/// Implementors of this trait are responsible for handling the creation,
@ -63,16 +52,6 @@ pub trait RenderingContext {
fn make_current(&self) -> Result<(), Error>;
/// Returns the OpenGL or GLES API.
fn gl_api(&self) -> Rc<dyn gleam::gl::Gl>;
/// Describes the OpenGL version that is requested when a context is created.
fn gl_version(&self) -> GLVersion;
/// Returns the GL Context used by servo media player. Default to `GlContext::Unknown`.
fn gl_context(&self) -> GlContext {
GlContext::Unknown
}
/// Returns the GL Display used by servo media player. Default to `NativeDisplay::Unknown`.
fn gl_display(&self) -> NativeDisplay {
NativeDisplay::Unknown
}
/// Creates a texture from a given surface and returns the surface texture,
/// the OpenGL texture object, and the size of the surface. Default to `None`.
fn create_texture(&self, _surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> {
@ -215,82 +194,6 @@ impl SurfmanRenderingContext {
}
impl RenderingContext for SurfmanRenderingContext {
fn gl_context(&self) -> GlContext {
#[cfg(all(target_os = "linux", not(target_env = "ohos")))]
{
match self.native_context() {
NativeContext::Default(LinuxNativeContext::Default(native_context)) => {
GlContext::Egl(native_context.egl_context as usize)
},
NativeContext::Default(LinuxNativeContext::Alternate(native_context)) => {
GlContext::Egl(native_context.egl_context as usize)
},
NativeContext::Alternate(_) => GlContext::Unknown,
}
}
#[cfg(target_os = "windows")]
{
#[cfg(feature = "no-wgl")]
{
GlContext::Egl(self.native_context().egl_context as usize)
}
#[cfg(not(feature = "no-wgl"))]
GlContext::Unknown
}
#[cfg(not(any(
target_os = "windows",
all(target_os = "linux", not(target_env = "ohos"))
)))]
{
GlContext::Unknown
}
}
fn gl_display(&self) -> NativeDisplay {
#[cfg(all(target_os = "linux", not(target_env = "ohos")))]
{
match self.device.borrow().connection().native_connection() {
surfman::NativeConnection::Default(LinuxNativeConnection::Default(connection)) => {
NativeDisplay::Egl(connection.0 as usize)
},
surfman::NativeConnection::Default(LinuxNativeConnection::Alternate(
connection,
)) => NativeDisplay::X11(connection.x11_display as usize),
surfman::NativeConnection::Alternate(_) => NativeDisplay::Unknown,
}
}
#[cfg(target_os = "windows")]
{
#[cfg(feature = "no-wgl")]
{
let device = &self.device.borrow();
NativeDisplay::Egl(device.native_device().egl_display as usize)
}
#[cfg(not(feature = "no-wgl"))]
NativeDisplay::Unknown
}
#[cfg(not(any(
target_os = "windows",
all(target_os = "linux", not(target_env = "ohos"))
)))]
{
NativeDisplay::Unknown
}
}
fn gl_version(&self) -> GLVersion {
let device = self.device.borrow();
let context = self.context.borrow();
let descriptor = device.context_descriptor(&context);
let attributes = device.context_descriptor_attributes(&descriptor);
let major = attributes.version.major;
let minor = attributes.version.minor;
match device.connection().gl_api() {
GLApi::GL => GLVersion::GL(major, minor),
GLApi::GLES => GLVersion::GLES(major, minor),
}
}
fn gl_api(&self) -> Rc<dyn gleam::gl::Gl> {
self.gl.clone()
}
@ -397,14 +300,6 @@ impl Drop for SoftwareRenderingContext {
}
impl RenderingContext for SoftwareRenderingContext {
fn gl_context(&self) -> GlContext {
self.surfman_rendering_info.gl_context()
}
fn gl_display(&self) -> NativeDisplay {
self.surfman_rendering_info.gl_display()
}
fn prepare_for_rendering(&self) {
self.surfman_rendering_info.prepare_for_rendering();
}
@ -436,10 +331,6 @@ impl RenderingContext for SoftwareRenderingContext {
self.surfman_rendering_info.gl.clone()
}
fn gl_version(&self) -> GLVersion {
self.surfman_rendering_info.gl_version()
}
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> {
self.surfman_rendering_info.create_texture(surface)
}
@ -544,17 +435,13 @@ impl WindowRenderingContext {
device.make_context_current(&context)?;
Ok(())
}
pub fn surfman_details(&self) -> (RefMut<Device>, RefMut<Context>) {
(self.0.device.borrow_mut(), self.0.context.borrow_mut())
}
}
impl RenderingContext for WindowRenderingContext {
fn gl_context(&self) -> GlContext {
self.0.gl_context()
}
fn gl_display(&self) -> NativeDisplay {
self.0.gl_display()
}
fn prepare_for_rendering(&self) {
self.0.prepare_for_rendering();
}
@ -584,10 +471,6 @@ impl RenderingContext for WindowRenderingContext {
self.0.gl.clone()
}
fn gl_version(&self) -> GLVersion {
self.0.gl_version()
}
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> {
self.0.create_texture(surface)
}
@ -873,10 +756,6 @@ impl RenderingContext for OffscreenRenderingContext {
self.parent_context.gl_api()
}
fn gl_version(&self) -> GLVersion {
self.parent_context.gl_version()
}
fn create_texture(&self, surface: Surface) -> Option<(SurfaceTexture, u32, Size2D<i32>)> {
self.parent_context.create_texture(surface)
}

View file

@ -0,0 +1,73 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::RefMut;
use surfman::{Context, Device};
#[cfg(not(any(
target_os = "windows",
all(target_os = "linux", not(target_env = "ohos"))
)))]
pub(crate) fn setup_gl_accelerated_media(_: RefMut<Device>, _: RefMut<Context>) {}
#[cfg(all(target_os = "linux", not(target_env = "ohos")))]
pub(crate) fn setup_gl_accelerated_media(device: RefMut<Device>, context: RefMut<Context>) {
use servo::media::{GlContext, NativeDisplay};
use servo::Servo;
use surfman::platform::generic::multi::connection::NativeConnection;
use surfman::platform::generic::multi::context::NativeContext;
let api = api(&device, &context);
let context = match device.native_context(&context) {
NativeContext::Default(NativeContext::Default(native_context)) => {
GlContext::Egl(native_context.egl_context as usize)
},
NativeContext::Default(NativeContext::Alternate(native_context)) => {
GlContext::Egl(native_context.egl_context as usize)
},
NativeContext::Alternate(_) => GlContext::Unknown,
};
let display = match device.connection().native_connection() {
surfman::NativeConnection::Default(NativeConnection::Default(connection)) => {
NativeDisplay::Egl(connection.0 as usize)
},
surfman::NativeConnection::Default(NativeConnection::Alternate(connection)) => {
NativeDisplay::X11(connection.x11_display as usize)
},
surfman::NativeConnection::Alternate(_) => NativeDisplay::Unknown,
};
Servo::initialize_gl_accelerated_media(display, api, context);
}
#[cfg(target_os = "windows")]
pub(crate) fn setup_gl_accelerated_media(device: RefMut<Device>, context: RefMut<Context>) {
use servo::media::{GlContext, NativeDisplay};
use servo::Servo;
let api = api(&device, &context);
let context = GlContext::Egl(device.native_context(&context).egl_context as usize);
let display = NativeDisplay::Egl(device.native_device().egl_display as usize);
Servo::initialize_gl_accelerated_media(display, api, context);
}
#[cfg(any(
all(target_os = "linux", not(target_env = "ohos")),
target_os = "windows"
))]
fn api(device: &RefMut<Device>, context: &RefMut<Context>) -> servo::media::GlApi {
use servo::media::GlApi;
use surfman::GLApi;
let descriptor = device.context_descriptor(context);
let attributes = device.context_descriptor_attributes(&descriptor);
let major = attributes.version.major;
let minor = attributes.version.minor;
match device.connection().gl_api() {
GLApi::GL if major >= 3 && minor >= 2 => GlApi::OpenGL3,
GLApi::GL => GlApi::OpenGL,
GLApi::GLES if major > 1 => GlApi::Gles2,
GLApi::GLES => GlApi::Gles1,
}
}

View file

@ -43,6 +43,7 @@ use super::app_state::RunningAppState;
use super::geometry::{winit_position_to_euclid_point, winit_size_to_euclid_size};
use super::keyutils::{keyboard_event_from_winit, CMD_OR_ALT};
use super::window_trait::{WindowPortsMethods, LINE_HEIGHT};
use crate::desktop::accelerated_gl_media::setup_gl_accelerated_media;
use crate::desktop::keyutils::CMD_OR_CONTROL;
use crate::prefs::ServoShellPreferences;
@ -132,6 +133,13 @@ impl Window {
.expect("Could not create RenderingContext for Window"),
);
// Setup for GL accelerated media handling. This is only active on certain Linux platforms
// and Windows.
{
let details = window_rendering_context.surfman_details();
setup_gl_accelerated_media(details.0, details.1);
}
// Make sure the gl context is made current.
window_rendering_context.make_current().unwrap();

View file

@ -4,6 +4,7 @@
//! Contains files specific to the servoshell app for Desktop systems.
mod accelerated_gl_media;
pub(crate) mod app;
mod app_state;
pub(crate) mod cli;