diff --git a/Cargo.lock b/Cargo.lock index 200c3d86bec..b47d10f887b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8488,7 +8488,6 @@ dependencies = [ [[package]] name = "webxr" version = "0.0.1" -source = "git+https://github.com/servo/webxr#4fd38cf6dd29ac58bafd0be2ef5337827853dcdd" dependencies = [ "crossbeam-channel", "euclid", @@ -8506,7 +8505,6 @@ dependencies = [ [[package]] name = "webxr-api" version = "0.0.1" -source = "git+https://github.com/servo/webxr#4fd38cf6dd29ac58bafd0be2ef5337827853dcdd" dependencies = [ "euclid", "ipc-channel", diff --git a/Cargo.toml b/Cargo.toml index 4ecfb2a1173..87164c6fb94 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -230,3 +230,7 @@ codegen-units = 1 # # [patch."https://github.com/servo/"] # = { path = "/path/to/local/checkout" } + +[patch."https://github.com/servo/webxr"] +webxr = { path = "components/webxr" } +webxr-api = { path = "components/shared/webxr" } diff --git a/components/shared/webxr/Cargo.toml b/components/shared/webxr/Cargo.toml new file mode 100644 index 00000000000..47caee00131 --- /dev/null +++ b/components/shared/webxr/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "webxr-api" +version = "0.0.1" +authors = ["The Servo Project Developers"] +edition = "2018" + +homepage = "https://github.com/servo/webxr" +repository = "https://github.com/servo/webxr" +keywords = ["ar", "headset", "openxr", "vr", "webxr"] +license = "MPL-2.0" + +description = '''A safe Rust API that provides a way to interact with +virtual reality and augmented reality devices and integration with OpenXR. +The API is inspired by the WebXR Device API (https://www.w3.org/TR/webxr/) +but adapted to Rust design patterns.''' + +[lib] +path = "lib.rs" + +[features] +ipc = ["serde", "ipc-channel", "euclid/serde"] + +[dependencies] +euclid = "0.22" +ipc-channel = { version = "0.19", optional = true } +log = "0.4" +serde = { version = "1.0", optional = true } +time = { version = "0.1", optional = true } diff --git a/components/shared/webxr/device.rs b/components/shared/webxr/device.rs new file mode 100644 index 00000000000..65f34d8560b --- /dev/null +++ b/components/shared/webxr/device.rs @@ -0,0 +1,114 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! Traits to be implemented by backends + +use crate::ContextId; +use crate::EnvironmentBlendMode; +use crate::Error; +use crate::Event; +use crate::Floor; +use crate::Frame; +use crate::HitTestId; +use crate::HitTestSource; +use crate::InputSource; +use crate::LayerId; +use crate::LayerInit; +use crate::Native; +use crate::Quitter; +use crate::Sender; +use crate::Session; +use crate::SessionBuilder; +use crate::SessionInit; +use crate::SessionMode; +use crate::Viewports; + +use euclid::{Point2D, RigidTransform3D}; + +/// A trait for discovering XR devices +pub trait DiscoveryAPI: 'static { + fn request_session( + &mut self, + mode: SessionMode, + init: &SessionInit, + xr: SessionBuilder, + ) -> Result; + fn supports_session(&self, mode: SessionMode) -> bool; +} + +/// A trait for using an XR device +pub trait DeviceAPI: 'static { + /// Create a new layer + fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result; + + /// Destroy a layer + fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId); + + /// The transform from native coordinates to the floor. 
+ fn floor_transform(&self) -> Option>; + + fn viewports(&self) -> Viewports; + + /// Begin an animation frame. + fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option; + + /// End an animation frame, render the layer to the device, and block waiting for the next frame. + fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]); + + /// Inputs registered with the device on initialization. More may be added, which + /// should be communicated through a yet-undecided event mechanism + fn initial_inputs(&self) -> Vec; + + /// Sets the event handling channel + fn set_event_dest(&mut self, dest: Sender); + + /// Quit the session + fn quit(&mut self); + + fn set_quitter(&mut self, quitter: Quitter); + + fn update_clip_planes(&mut self, near: f32, far: f32); + + fn environment_blend_mode(&self) -> EnvironmentBlendMode { + // for VR devices, override for AR + EnvironmentBlendMode::Opaque + } + + fn granted_features(&self) -> &[String]; + + fn request_hit_test(&mut self, _source: HitTestSource) { + panic!("This device does not support requesting hit tests"); + } + + fn cancel_hit_test(&mut self, _id: HitTestId) { + panic!("This device does not support hit tests"); + } + + fn update_frame_rate(&mut self, rate: f32) -> f32 { + rate + } + + fn supported_frame_rates(&self) -> Vec { + Vec::new() + } + + fn reference_space_bounds(&self) -> Option>> { + None + } +} + +impl DiscoveryAPI for Box> { + fn request_session( + &mut self, + mode: SessionMode, + init: &SessionInit, + xr: SessionBuilder, + ) -> Result { + (&mut **self).request_session(mode, init, xr) + } + + fn supports_session(&self, mode: SessionMode) -> bool { + (&**self).supports_session(mode) + } +} diff --git a/components/shared/webxr/error.rs b/components/shared/webxr/error.rs new file mode 100644 index 00000000000..86822f6fb7a --- /dev/null +++ b/components/shared/webxr/error.rs @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +/// Errors that can be produced by XR. + +// TODO: this is currently incomplete! + +#[derive(Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Error { + NoMatchingDevice, + CommunicationError, + ThreadCreationError, + InlineSession, + UnsupportedFeature(String), + BackendSpecific(String), +} diff --git a/components/shared/webxr/events.rs b/components/shared/webxr/events.rs new file mode 100644 index 00000000000..338913464ba --- /dev/null +++ b/components/shared/webxr/events.rs @@ -0,0 +1,80 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use euclid::RigidTransform3D; + +use crate::ApiSpace; +use crate::BaseSpace; +use crate::Frame; +use crate::InputFrame; +use crate::InputId; +use crate::InputSource; +use crate::SelectEvent; +use crate::SelectKind; +use crate::Sender; + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum Event { + /// Input source connected + AddInput(InputSource), + /// Input source disconnected + RemoveInput(InputId), + /// Input updated (this is a disconnect+reconnect) + UpdateInput(InputId, InputSource), + /// Session ended by device + SessionEnd, + /// Session focused/blurred/etc + VisibilityChange(Visibility), + /// Selection started / ended + Select(InputId, SelectKind, SelectEvent, Frame), + /// Input from an input source has changed + InputChanged(InputId, InputFrame), + /// Reference space has changed + ReferenceSpaceChanged(BaseSpace, RigidTransform3D), +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum Visibility { + /// Session fully displayed to user + Visible, + /// Session still visible, but is not the primary focus + VisibleBlurred, + /// Session not visible + Hidden, +} + +/// Convenience structure for buffering up events +/// when no event callback has been set +pub enum EventBuffer { + Buffered(Vec), + Sink(Sender), +} + +impl Default for EventBuffer { + fn default() -> Self { + EventBuffer::Buffered(vec![]) + } +} + +impl EventBuffer { + pub fn callback(&mut self, event: Event) { + match *self { + EventBuffer::Buffered(ref mut events) => events.push(event), + EventBuffer::Sink(ref dest) => { + let _ = dest.send(event); + } + } + } + + pub fn upgrade(&mut self, dest: Sender) { + if let EventBuffer::Buffered(ref mut events) = *self { + for event in events.drain(..) { + let _ = dest.send(event); + } + } + *self = EventBuffer::Sink(dest) + } +} diff --git a/components/shared/webxr/frame.rs b/components/shared/webxr/frame.rs new file mode 100644 index 00000000000..2589953ecba --- /dev/null +++ b/components/shared/webxr/frame.rs @@ -0,0 +1,60 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::Floor; +use crate::HitTestId; +use crate::HitTestResult; +use crate::InputFrame; +use crate::Native; +use crate::SubImages; +use crate::Viewer; +use crate::Viewports; +use crate::Views; + +use euclid::RigidTransform3D; + +/// The per-frame data that is provided by the device. +/// https://www.w3.org/TR/webxr/#xrframe +// TODO: other fields? +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct Frame { + /// The pose information of the viewer + pub pose: Option, + /// Frame information for each connected input source + pub inputs: Vec, + + /// Events that occur with the frame. 
+ pub events: Vec, + + /// The subimages to render to + pub sub_images: Vec, + + /// The hit test results for this frame, if any + pub hit_test_results: Vec, + + /// The average point in time this XRFrame is expected to be displayed on the devices' display + pub predicted_display_time: f64, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum FrameUpdateEvent { + UpdateFloorTransform(Option>), + UpdateViewports(Viewports), + HitTestSourceAdded(HitTestId), +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct ViewerPose { + /// The transform from the viewer to native coordinates + /// + /// This is equivalent to the pose of the viewer in native coordinates. + /// This is the inverse of the view matrix. + pub transform: RigidTransform3D, + + // The various views + pub views: Views, +} diff --git a/components/shared/webxr/hand.rs b/components/shared/webxr/hand.rs new file mode 100644 index 00000000000..fa6e8fafe80 --- /dev/null +++ b/components/shared/webxr/hand.rs @@ -0,0 +1,122 @@ +use crate::Native; +use euclid::RigidTransform3D; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct HandSpace; + +#[derive(Clone, Debug, Default)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct Hand { + pub wrist: Option, + pub thumb_metacarpal: Option, + pub thumb_phalanx_proximal: Option, + pub thumb_phalanx_distal: Option, + pub thumb_phalanx_tip: Option, + pub index: Finger, + pub middle: Finger, + pub ring: Finger, + pub little: Finger, +} + +#[derive(Clone, Debug, Default)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct Finger { + pub metacarpal: Option, + pub phalanx_proximal: Option, + pub phalanx_intermediate: Option, + pub phalanx_distal: Option, + pub phalanx_tip: Option, +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct JointFrame { + pub pose: RigidTransform3D, + pub radius: f32, +} + +impl Default for JointFrame { + fn default() -> Self { + Self { + pose: RigidTransform3D::identity(), + radius: 0., + } + } +} + +impl Hand { + pub fn map(&self, map: impl (Fn(&Option, Joint) -> Option) + Copy) -> Hand { + Hand { + wrist: map(&self.wrist, Joint::Wrist), + thumb_metacarpal: map(&self.thumb_metacarpal, Joint::ThumbMetacarpal), + thumb_phalanx_proximal: map(&self.thumb_phalanx_proximal, Joint::ThumbPhalanxProximal), + thumb_phalanx_distal: map(&self.thumb_phalanx_distal, Joint::ThumbPhalanxDistal), + thumb_phalanx_tip: map(&self.thumb_phalanx_tip, Joint::ThumbPhalanxTip), + index: self.index.map(|f, j| map(f, Joint::Index(j))), + middle: self.middle.map(|f, j| map(f, Joint::Middle(j))), + ring: self.ring.map(|f, j| map(f, Joint::Ring(j))), + little: self.little.map(|f, j| map(f, Joint::Little(j))), + } + } + + pub fn get(&self, joint: Joint) -> Option<&J> { + match joint { + Joint::Wrist => self.wrist.as_ref(), + Joint::ThumbMetacarpal => self.thumb_metacarpal.as_ref(), + Joint::ThumbPhalanxProximal => self.thumb_phalanx_proximal.as_ref(), + Joint::ThumbPhalanxDistal => self.thumb_phalanx_distal.as_ref(), + Joint::ThumbPhalanxTip => self.thumb_phalanx_tip.as_ref(), + Joint::Index(f) => self.index.get(f), + Joint::Middle(f) => self.middle.get(f), + Joint::Ring(f) => self.ring.get(f), + Joint::Little(f) => self.little.get(f), + } + } +} + 
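// Editor's note: an illustrative sketch, not part of this patch. It shows how a
// consumer of the hand-tracking types above might use `Hand::map` and `Hand::get`
// to project per-joint data out of a `Hand<JointFrame>` (for example, the one
// carried by `InputFrame::hand`). The function names are placeholders.
fn joint_radii(hand: &Hand<JointFrame>) -> Hand<f32> {
    // The closure receives each joint slot together with its `Joint` label.
    hand.map(|slot, _joint| slot.as_ref().map(|frame| frame.radius))
}

fn wrist_radius(hand: &Hand<JointFrame>) -> Option<f32> {
    hand.get(Joint::Wrist).map(|frame| frame.radius)
}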
+impl Finger { + pub fn map(&self, map: impl (Fn(&Option, FingerJoint) -> Option) + Copy) -> Finger { + Finger { + metacarpal: map(&self.metacarpal, FingerJoint::Metacarpal), + phalanx_proximal: map(&self.phalanx_proximal, FingerJoint::PhalanxProximal), + phalanx_intermediate: map(&self.phalanx_intermediate, FingerJoint::PhalanxIntermediate), + phalanx_distal: map(&self.phalanx_distal, FingerJoint::PhalanxDistal), + phalanx_tip: map(&self.phalanx_tip, FingerJoint::PhalanxTip), + } + } + + pub fn get(&self, joint: FingerJoint) -> Option<&J> { + match joint { + FingerJoint::Metacarpal => self.metacarpal.as_ref(), + FingerJoint::PhalanxProximal => self.phalanx_proximal.as_ref(), + FingerJoint::PhalanxIntermediate => self.phalanx_intermediate.as_ref(), + FingerJoint::PhalanxDistal => self.phalanx_distal.as_ref(), + FingerJoint::PhalanxTip => self.phalanx_tip.as_ref(), + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum FingerJoint { + Metacarpal, + PhalanxProximal, + PhalanxIntermediate, + PhalanxDistal, + PhalanxTip, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum Joint { + Wrist, + ThumbMetacarpal, + ThumbPhalanxProximal, + ThumbPhalanxDistal, + ThumbPhalanxTip, + Index(FingerJoint), + Middle(FingerJoint), + Ring(FingerJoint), + Little(FingerJoint), +} diff --git a/components/shared/webxr/hittest.rs b/components/shared/webxr/hittest.rs new file mode 100644 index 00000000000..3e56ff8c357 --- /dev/null +++ b/components/shared/webxr/hittest.rs @@ -0,0 +1,179 @@ +use crate::ApiSpace; +use crate::Native; +use crate::Space; +use euclid::Point3D; +use euclid::RigidTransform3D; +use euclid::Rotation3D; +use euclid::Vector3D; +use std::f32::EPSILON; +use std::iter::FromIterator; + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// https://immersive-web.github.io/hit-test/#xrray +pub struct Ray { + /// The origin of the ray + pub origin: Vector3D, + /// The direction of the ray. Must be normalized. 
+ pub direction: Vector3D, +} + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// https://immersive-web.github.io/hit-test/#enumdef-xrhittesttrackabletype +pub enum EntityType { + Point, + Plane, + Mesh, +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// https://immersive-web.github.io/hit-test/#dictdef-xrhittestoptionsinit +pub struct HitTestSource { + pub id: HitTestId, + pub space: Space, + pub ray: Ray, + pub types: EntityTypes, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct HitTestId(pub u32); + +#[derive(Copy, Clone, Debug, Default)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// Vec, but better +pub struct EntityTypes { + pub point: bool, + pub plane: bool, + pub mesh: bool, +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct HitTestResult { + pub id: HitTestId, + pub space: RigidTransform3D, +} + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// The coordinate space of a hit test result +pub struct HitTestSpace; + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct Triangle { + pub first: Point3D, + pub second: Point3D, + pub third: Point3D, +} + +impl EntityTypes { + pub fn is_type(self, ty: EntityType) -> bool { + match ty { + EntityType::Point => self.point, + EntityType::Plane => self.plane, + EntityType::Mesh => self.mesh, + } + } + + pub fn add_type(&mut self, ty: EntityType) { + match ty { + EntityType::Point => self.point = true, + EntityType::Plane => self.plane = true, + EntityType::Mesh => self.mesh = true, + } + } +} + +impl FromIterator for EntityTypes { + fn from_iter(iter: T) -> Self + where + T: IntoIterator, + { + iter.into_iter().fold(Default::default(), |mut acc, e| { + acc.add_type(e); + acc + }) + } +} + +impl Triangle { + /// https://en.wikipedia.org/wiki/M%C3%B6ller%E2%80%93Trumbore_intersection_algorithm + pub fn intersect( + self, + ray: Ray, + ) -> Option> { + let Triangle { + first: v0, + second: v1, + third: v2, + } = self; + + let edge1 = v1 - v0; + let edge2 = v2 - v0; + + let h = ray.direction.cross(edge2); + let a = edge1.dot(h); + if a > -EPSILON && a < EPSILON { + // ray is parallel to triangle + return None; + } + + let f = 1. / a; + + let s = ray.origin - v0.to_vector(); + + // barycentric coordinate of intersection point u + let u = f * s.dot(h); + // barycentric coordinates have range (0, 1) + if u < 0. || u > 1. { + // the intersection is outside the triangle + return None; + } + + let q = s.cross(edge1); + // barycentric coordinate of intersection point v + let v = f * ray.direction.dot(q); + + // barycentric coordinates have range (0, 1) + // and their sum must not be greater than 1 + if v < 0. || u + v > 1. 
{ + // the intersection is outside the triangle + return None; + } + + let t = f * edge2.dot(q); + + if t > EPSILON { + let origin = ray.origin + ray.direction * t; + + // this is not part of the Möller-Trumbore algorithm, the hit test spec + // requires it has an orientation such that the Y axis points along + // the triangle normal + let normal = edge1.cross(edge2).normalize(); + let y = Vector3D::new(0., 1., 0.); + let dot = normal.dot(y); + let rotation = if dot > -EPSILON && dot < EPSILON { + // vectors are parallel, return the vector itself + // XXXManishearth it's possible for the vectors to be + // antiparallel, unclear if normals need to be flipped + Rotation3D::identity() + } else { + let axis = normal.cross(y); + let cos = normal.dot(y); + // This is Rotation3D::around_axis(axis.normalize(), theta), however + // that is just Rotation3D::quaternion(axis.normalize().xyz * sin, cos), + // which is Rotation3D::quaternion(cross, dot) + Rotation3D::quaternion(axis.x, axis.y, axis.z, cos) + }; + + return Some(RigidTransform3D::new(rotation, origin)); + } + + // triangle is behind ray + None + } +} diff --git a/components/shared/webxr/input.rs b/components/shared/webxr/input.rs new file mode 100644 index 00000000000..9fcd2a18554 --- /dev/null +++ b/components/shared/webxr/input.rs @@ -0,0 +1,74 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::Hand; +use crate::Input; +use crate::JointFrame; +use crate::Native; + +use euclid::RigidTransform3D; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct InputId(pub u32); + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum Handedness { + None, + Left, + Right, +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum TargetRayMode { + Gaze, + TrackedPointer, + Screen, + TransientPointer, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct InputSource { + pub handedness: Handedness, + pub target_ray_mode: TargetRayMode, + pub id: InputId, + pub supports_grip: bool, + pub hand_support: Option>, + pub profiles: Vec, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct InputFrame { + pub id: InputId, + pub target_ray_origin: Option>, + pub grip_origin: Option>, + pub pressed: bool, + pub hand: Option>>, + pub squeezed: bool, + pub button_values: Vec, + pub axis_values: Vec, + pub input_changed: bool, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum SelectEvent { + /// Selection started + Start, + /// Selection ended *without* it being a contiguous select event + End, + /// Selection ended *with* it being a contiguous select event + Select, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum SelectKind { + Select, + Squeeze, +} diff --git a/components/shared/webxr/layer.rs b/components/shared/webxr/layer.rs new file mode 100644 index 00000000000..b0a607f290f --- /dev/null +++ b/components/shared/webxr/layer.rs @@ -0,0 +1,296 @@ +/* This Source Code Form is subject 
to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::Error; +use crate::Viewport; +use crate::Viewports; + +use euclid::Rect; +use euclid::Size2D; + +use std::fmt::Debug; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub struct ContextId(pub u64); + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +pub trait GLTypes { + type Device; + type Context; + type Bindings; +} + +pub trait GLContexts { + fn bindings(&mut self, device: &GL::Device, context_id: ContextId) -> Option<&GL::Bindings>; + fn context(&mut self, device: &GL::Device, context_id: ContextId) -> Option<&mut GL::Context>; +} + +impl GLTypes for () { + type Bindings = (); + type Device = (); + type Context = (); +} + +impl GLContexts<()> for () { + fn context(&mut self, _: &(), _: ContextId) -> Option<&mut ()> { + Some(self) + } + + fn bindings(&mut self, _: &(), _: ContextId) -> Option<&()> { + Some(self) + } +} + +pub trait LayerGrandManagerAPI { + fn create_layer_manager(&self, factory: LayerManagerFactory) + -> Result; + + fn clone_layer_grand_manager(&self) -> LayerGrandManager; +} + +pub struct LayerGrandManager(Box>); + +impl Clone for LayerGrandManager { + fn clone(&self) -> Self { + self.0.clone_layer_grand_manager() + } +} + +impl Debug for LayerGrandManager { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + "LayerGrandManager(...)".fmt(fmt) + } +} + +impl LayerGrandManager { + pub fn new(grand_manager: GM) -> LayerGrandManager + where + GM: 'static + Send + LayerGrandManagerAPI, + { + LayerGrandManager(Box::new(grand_manager)) + } + + pub fn create_layer_manager(&self, factory: F) -> Result + where + F: 'static + Send + FnOnce(&mut GL::Device, &mut dyn GLContexts) -> Result, + M: 'static + LayerManagerAPI, + { + self.0 + .create_layer_manager(LayerManagerFactory::new(factory)) + } +} + +pub trait LayerManagerAPI { + fn create_layer( + &mut self, + device: &mut GL::Device, + contexts: &mut dyn GLContexts, + context_id: ContextId, + init: LayerInit, + ) -> Result; + + fn destroy_layer( + &mut self, + device: &mut GL::Device, + contexts: &mut dyn GLContexts, + context_id: ContextId, + layer_id: LayerId, + ); + + fn layers(&self) -> &[(ContextId, LayerId)]; + + fn begin_frame( + &mut self, + device: &mut GL::Device, + contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result, Error>; + + fn end_frame( + &mut self, + device: &mut GL::Device, + contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result<(), Error>; +} + +pub struct LayerManager(Box>); + +impl Debug for LayerManager { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + "LayerManager(...)".fmt(fmt) + } +} + +impl LayerManager { + pub fn create_layer( + &mut self, + context_id: ContextId, + init: LayerInit, + ) -> Result { + self.0.create_layer(&mut (), &mut (), context_id, init) + } + + pub fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) { + self.0.destroy_layer(&mut (), &mut (), context_id, layer_id); + } + + pub fn begin_frame( + &mut self, + layers: &[(ContextId, LayerId)], + ) -> Result, Error> { + self.0.begin_frame(&mut (), &mut (), layers) + } + + pub fn end_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Result<(), Error> { + 
self.0.end_frame(&mut (), &mut (), layers) + } +} + +impl LayerManager { + pub fn new(manager: M) -> LayerManager + where + M: 'static + Send + LayerManagerAPI<()>, + { + LayerManager(Box::new(manager)) + } +} + +impl Drop for LayerManager { + fn drop(&mut self) { + log::debug!("Dropping LayerManager"); + for (context_id, layer_id) in self.0.layers().to_vec() { + self.destroy_layer(context_id, layer_id); + } + } +} + +pub struct LayerManagerFactory( + Box< + dyn Send + + FnOnce( + &mut GL::Device, + &mut dyn GLContexts, + ) -> Result>, Error>, + >, +); + +impl Debug for LayerManagerFactory { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + "LayerManagerFactory(...)".fmt(fmt) + } +} + +impl LayerManagerFactory { + pub fn new(factory: F) -> LayerManagerFactory + where + F: 'static + Send + FnOnce(&mut GL::Device, &mut dyn GLContexts) -> Result, + M: 'static + LayerManagerAPI, + { + LayerManagerFactory(Box::new(move |device, contexts| { + Ok(Box::new(factory(device, contexts)?)) + })) + } + + pub fn build( + self, + device: &mut GL::Device, + contexts: &mut dyn GLContexts, + ) -> Result>, Error> { + (self.0)(device, contexts) + } +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub struct LayerId(usize); + +static NEXT_LAYER_ID: AtomicUsize = AtomicUsize::new(0); + +impl LayerId { + pub fn new() -> LayerId { + LayerId(NEXT_LAYER_ID.fetch_add(1, Ordering::SeqCst)) + } +} + +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub enum LayerInit { + // https://www.w3.org/TR/webxr/#dictdef-xrwebgllayerinit + WebGLLayer { + antialias: bool, + depth: bool, + stencil: bool, + alpha: bool, + ignore_depth_values: bool, + framebuffer_scale_factor: f32, + }, + // https://immersive-web.github.io/layers/#xrprojectionlayerinittype + ProjectionLayer { + depth: bool, + stencil: bool, + alpha: bool, + scale_factor: f32, + }, + // TODO: other layer types +} + +impl LayerInit { + pub fn texture_size(&self, viewports: &Viewports) -> Size2D { + match self { + LayerInit::WebGLLayer { + framebuffer_scale_factor: scale, + .. + } + | LayerInit::ProjectionLayer { + scale_factor: scale, + .. 
+ } => { + let native_size = viewports + .viewports + .iter() + .fold(Rect::zero(), |acc, view| acc.union(view)) + .size; + (native_size.to_f32() * *scale).to_i32() + } + } + } +} + +/// https://immersive-web.github.io/layers/#enumdef-xrlayerlayout +#[derive(Copy, Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub enum LayerLayout { + // TODO: Default + // Allocates one texture + Mono, + // Allocates one texture, which is split in half vertically, giving two subimages + StereoLeftRight, + // Allocates one texture, which is split in half horizonally, giving two subimages + StereoTopBottom, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub struct SubImages { + pub layer_id: LayerId, + pub sub_image: Option, + pub view_sub_images: Vec, +} + +/// https://immersive-web.github.io/layers/#xrsubimagetype +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub struct SubImage { + pub color_texture: u32, + // TODO: make this Option + pub depth_stencil_texture: Option, + pub texture_array_index: Option, + pub viewport: Rect, +} diff --git a/components/shared/webxr/lib.rs b/components/shared/webxr/lib.rs new file mode 100644 index 00000000000..9acad34e0e5 --- /dev/null +++ b/components/shared/webxr/lib.rs @@ -0,0 +1,175 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! This crate defines the Rust API for WebXR. It is implemented by the `webxr` crate. + +mod device; +mod error; +mod events; +mod frame; +mod hand; +mod hittest; +mod input; +mod layer; +mod mock; +mod registry; +mod session; +mod space; +pub mod util; +mod view; + +pub use device::DeviceAPI; +pub use device::DiscoveryAPI; + +pub use error::Error; + +pub use events::Event; +pub use events::EventBuffer; +pub use events::Visibility; + +pub use frame::Frame; +pub use frame::FrameUpdateEvent; +pub use frame::ViewerPose; + +pub use hand::Finger; +pub use hand::FingerJoint; +pub use hand::Hand; +pub use hand::HandSpace; +pub use hand::Joint; +pub use hand::JointFrame; + +pub use hittest::EntityType; +pub use hittest::EntityTypes; +pub use hittest::HitTestId; +pub use hittest::HitTestResult; +pub use hittest::HitTestSource; +pub use hittest::HitTestSpace; +pub use hittest::Ray; +pub use hittest::Triangle; + +pub use input::Handedness; +pub use input::InputFrame; +pub use input::InputId; +pub use input::InputSource; +pub use input::SelectEvent; +pub use input::SelectKind; +pub use input::TargetRayMode; + +pub use layer::ContextId; +pub use layer::GLContexts; +pub use layer::GLTypes; +pub use layer::LayerGrandManager; +pub use layer::LayerGrandManagerAPI; +pub use layer::LayerId; +pub use layer::LayerInit; +pub use layer::LayerLayout; +pub use layer::LayerManager; +pub use layer::LayerManagerAPI; +pub use layer::LayerManagerFactory; +pub use layer::SubImage; +pub use layer::SubImages; + +pub use mock::MockButton; +pub use mock::MockButtonType; +pub use mock::MockDeviceInit; +pub use mock::MockDeviceMsg; +pub use mock::MockDiscoveryAPI; +pub use mock::MockInputInit; +pub use mock::MockInputMsg; +pub use mock::MockRegion; +pub use mock::MockViewInit; +pub use mock::MockViewsInit; +pub use mock::MockWorld; + +pub use registry::MainThreadRegistry; +pub use registry::MainThreadWaker; +pub use registry::Registry; + +pub use session::EnvironmentBlendMode; +pub use 
session::MainThreadSession; +pub use session::Quitter; +pub use session::Session; +pub use session::SessionBuilder; +pub use session::SessionId; +pub use session::SessionInit; +pub use session::SessionMode; +pub use session::SessionThread; + +pub use space::ApiSpace; +pub use space::BaseSpace; +pub use space::Space; + +pub use view::Capture; +pub use view::CubeBack; +pub use view::CubeBottom; +pub use view::CubeLeft; +pub use view::CubeRight; +pub use view::CubeTop; +pub use view::Display; +pub use view::Floor; +pub use view::Input; +pub use view::LeftEye; +pub use view::Native; +pub use view::RightEye; +pub use view::SomeEye; +pub use view::View; +pub use view::Viewer; +pub use view::Viewport; +pub use view::Viewports; +pub use view::Views; +pub use view::CUBE_BACK; +pub use view::CUBE_BOTTOM; +pub use view::CUBE_LEFT; +pub use view::CUBE_RIGHT; +pub use view::CUBE_TOP; +pub use view::LEFT_EYE; +pub use view::RIGHT_EYE; +pub use view::VIEWER; + +#[cfg(feature = "ipc")] +use std::thread; + +use std::time::Duration; + +#[cfg(feature = "ipc")] +pub use ipc_channel::ipc::IpcSender as Sender; + +#[cfg(feature = "ipc")] +pub use ipc_channel::ipc::IpcReceiver as Receiver; + +#[cfg(feature = "ipc")] +pub use ipc_channel::ipc::channel; + +#[cfg(not(feature = "ipc"))] +pub use std::sync::mpsc::{Receiver, RecvTimeoutError, Sender}; + +#[cfg(not(feature = "ipc"))] +pub fn channel() -> Result<(Sender, Receiver), ()> { + Ok(std::sync::mpsc::channel()) +} + +#[cfg(not(feature = "ipc"))] +pub fn recv_timeout(receiver: &Receiver, timeout: Duration) -> Result { + receiver.recv_timeout(timeout) +} + +#[cfg(feature = "ipc")] +pub fn recv_timeout( + receiver: &Receiver, + timeout: Duration, +) -> Result +where + T: serde::Serialize + for<'a> serde::Deserialize<'a>, +{ + // Sigh, polling, sigh. + let mut delay = timeout / 1000; + while delay < timeout { + if let Ok(msg) = receiver.try_recv() { + return Ok(msg); + } + thread::sleep(delay); + delay = delay * 2; + } + receiver.try_recv() +} diff --git a/components/shared/webxr/mock.rs b/components/shared/webxr/mock.rs new file mode 100644 index 00000000000..91c15bae44b --- /dev/null +++ b/components/shared/webxr/mock.rs @@ -0,0 +1,146 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use crate::DiscoveryAPI; +use crate::Display; +use crate::EntityType; +use crate::Error; +use crate::Floor; +use crate::Handedness; +use crate::Input; +use crate::InputId; +use crate::InputSource; +use crate::LeftEye; +use crate::Native; +use crate::Receiver; +use crate::RightEye; +use crate::SelectEvent; +use crate::SelectKind; +use crate::Sender; +use crate::TargetRayMode; +use crate::Triangle; +use crate::Viewer; +use crate::Viewport; +use crate::Visibility; + +use euclid::{Point2D, Rect, RigidTransform3D, Transform3D}; + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +/// A trait for discovering mock XR devices +pub trait MockDiscoveryAPI: 'static { + fn simulate_device_connection( + &mut self, + init: MockDeviceInit, + receiver: Receiver, + ) -> Result>, Error>; +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockDeviceInit { + pub floor_origin: Option>, + pub supports_inline: bool, + pub supports_vr: bool, + pub supports_ar: bool, + pub viewer_origin: Option>, + pub views: MockViewsInit, + pub supported_features: Vec, + pub world: Option, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockViewInit { + pub transform: RigidTransform3D, + pub projection: Transform3D, + pub viewport: Rect, + /// field of view values, in radians + pub fov: Option<(f32, f32, f32, f32)>, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum MockViewsInit { + Mono(MockViewInit), + Stereo(MockViewInit, MockViewInit), +} + +#[derive(Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum MockDeviceMsg { + SetViewerOrigin(Option>), + SetFloorOrigin(Option>), + SetViews(MockViewsInit), + AddInputSource(MockInputInit), + MessageInputSource(InputId, MockInputMsg), + VisibilityChange(Visibility), + SetWorld(MockWorld), + ClearWorld, + Disconnect(Sender<()>), + SetBoundsGeometry(Vec>), + SimulateResetPose, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockInputInit { + pub source: InputSource, + pub pointer_origin: Option>, + pub grip_origin: Option>, + pub supported_buttons: Vec, +} + +#[derive(Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum MockInputMsg { + SetHandedness(Handedness), + SetTargetRayMode(TargetRayMode), + SetProfiles(Vec), + SetPointerOrigin(Option>), + SetGripOrigin(Option>), + /// Note: SelectEvent::Select here refers to a complete Select event, + /// not just the end event, i.e. 
it refers to + /// https://immersive-web.github.io/webxr-test-api/#dom-fakexrinputcontroller-simulateselect + TriggerSelect(SelectKind, SelectEvent), + Disconnect, + Reconnect, + SetSupportedButtons(Vec), + UpdateButtonState(MockButton), +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockRegion { + pub faces: Vec, + pub ty: EntityType, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockWorld { + pub regions: Vec, +} + +#[derive(Clone, Debug, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum MockButtonType { + Grip, + Touchpad, + Thumbstick, + OptionalButton, + OptionalThumbstick, +} + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct MockButton { + pub button_type: MockButtonType, + pub pressed: bool, + pub touched: bool, + pub pressed_value: f32, + pub x_value: f32, + pub y_value: f32, +} diff --git a/components/shared/webxr/registry.rs b/components/shared/webxr/registry.rs new file mode 100644 index 00000000000..337bb80a8a2 --- /dev/null +++ b/components/shared/webxr/registry.rs @@ -0,0 +1,262 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::DiscoveryAPI; +use crate::Error; +use crate::Frame; +use crate::GLTypes; +use crate::LayerGrandManager; +use crate::MainThreadSession; +use crate::MockDeviceInit; +use crate::MockDeviceMsg; +use crate::MockDiscoveryAPI; +use crate::Receiver; +use crate::Sender; +use crate::Session; +use crate::SessionBuilder; +use crate::SessionId; +use crate::SessionInit; +use crate::SessionMode; + +use log::warn; + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +#[derive(Clone)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct Registry { + sender: Sender, + waker: MainThreadWakerImpl, +} + +pub struct MainThreadRegistry { + discoveries: Vec>>, + sessions: Vec>, + mocks: Vec>>, + sender: Sender, + receiver: Receiver, + waker: MainThreadWakerImpl, + grand_manager: LayerGrandManager, + next_session_id: u32, +} + +pub trait MainThreadWaker: 'static + Send { + fn clone_box(&self) -> Box; + fn wake(&self); +} + +impl Clone for Box { + fn clone(&self) -> Self { + self.clone_box() + } +} + +#[derive(Clone)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +struct MainThreadWakerImpl { + #[cfg(feature = "ipc")] + sender: Sender<()>, + #[cfg(not(feature = "ipc"))] + waker: Box, +} + +#[cfg(feature = "ipc")] +impl MainThreadWakerImpl { + fn new(waker: Box) -> Result { + let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?; + ipc_channel::router::ROUTER.add_typed_route(receiver, Box::new(move |_| waker.wake())); + Ok(MainThreadWakerImpl { sender }) + } + + fn wake(&self) { + let _ = self.sender.send(()); + } +} + +#[cfg(not(feature = "ipc"))] +impl MainThreadWakerImpl { + fn new(waker: Box) -> Result { + Ok(MainThreadWakerImpl { waker }) + } + + pub fn wake(&self) { + self.waker.wake() + } +} + +impl Registry { + pub fn supports_session(&mut self, mode: SessionMode, dest: Sender>) { + let _ = self.sender.send(RegistryMsg::SupportsSession(mode, dest)); + self.waker.wake(); + } + + pub fn request_session( + &mut self, + mode: SessionMode, + init: SessionInit, + dest: Sender>, + animation_frame_handler: Sender, + ) { + let _ = 
self.sender.send(RegistryMsg::RequestSession( + mode, + init, + dest, + animation_frame_handler, + )); + self.waker.wake(); + } + + pub fn simulate_device_connection( + &mut self, + init: MockDeviceInit, + dest: Sender, Error>>, + ) { + let _ = self + .sender + .send(RegistryMsg::SimulateDeviceConnection(init, dest)); + self.waker.wake(); + } +} + +impl MainThreadRegistry { + pub fn new( + waker: Box, + grand_manager: LayerGrandManager, + ) -> Result { + let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?; + let discoveries = Vec::new(); + let sessions = Vec::new(); + let mocks = Vec::new(); + let waker = MainThreadWakerImpl::new(waker)?; + Ok(MainThreadRegistry { + discoveries, + sessions, + mocks, + sender, + receiver, + waker, + grand_manager, + next_session_id: 0, + }) + } + + pub fn registry(&self) -> Registry { + Registry { + sender: self.sender.clone(), + waker: self.waker.clone(), + } + } + + pub fn register(&mut self, discovery: D) + where + D: DiscoveryAPI, + { + self.discoveries.push(Box::new(discovery)); + } + + pub fn register_mock(&mut self, discovery: D) + where + D: MockDiscoveryAPI, + { + self.mocks.push(Box::new(discovery)); + } + + pub fn run_on_main_thread(&mut self, session: S) + where + S: MainThreadSession, + { + self.sessions.push(Box::new(session)); + } + + pub fn run_one_frame(&mut self) { + while let Ok(msg) = self.receiver.try_recv() { + self.handle_msg(msg); + } + for session in &mut self.sessions { + session.run_one_frame(); + } + self.sessions.retain(|session| session.running()); + } + + pub fn running(&self) -> bool { + self.sessions.iter().any(|session| session.running()) + } + + fn handle_msg(&mut self, msg: RegistryMsg) { + match msg { + RegistryMsg::SupportsSession(mode, dest) => { + let _ = dest.send(self.supports_session(mode)); + } + RegistryMsg::RequestSession(mode, init, dest, raf_sender) => { + let _ = dest.send(self.request_session(mode, init, raf_sender)); + } + RegistryMsg::SimulateDeviceConnection(init, dest) => { + let _ = dest.send(self.simulate_device_connection(init)); + } + } + } + + fn supports_session(&mut self, mode: SessionMode) -> Result<(), Error> { + for discovery in &self.discoveries { + if discovery.supports_session(mode) { + return Ok(()); + } + } + Err(Error::NoMatchingDevice) + } + + fn request_session( + &mut self, + mode: SessionMode, + init: SessionInit, + raf_sender: Sender, + ) -> Result { + for discovery in &mut self.discoveries { + if discovery.supports_session(mode) { + let raf_sender = raf_sender.clone(); + let id = SessionId(self.next_session_id); + self.next_session_id += 1; + let xr = SessionBuilder::new( + &mut self.sessions, + raf_sender, + self.grand_manager.clone(), + id, + ); + match discovery.request_session(mode, &init, xr) { + Ok(session) => return Ok(session), + Err(err) => warn!("XR device error {:?}", err), + } + } + } + warn!("no device could support the session"); + Err(Error::NoMatchingDevice) + } + + fn simulate_device_connection( + &mut self, + init: MockDeviceInit, + ) -> Result, Error> { + for mock in &mut self.mocks { + let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?; + if let Ok(discovery) = mock.simulate_device_connection(init.clone(), receiver) { + self.discoveries.insert(0, discovery); + return Ok(sender); + } + } + Err(Error::NoMatchingDevice) + } +} + +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +enum RegistryMsg { + RequestSession( + SessionMode, + SessionInit, + Sender>, + Sender, + ), + SupportsSession(SessionMode, 
Sender>), + SimulateDeviceConnection(MockDeviceInit, Sender, Error>>), +} diff --git a/components/shared/webxr/session.rs b/components/shared/webxr/session.rs new file mode 100644 index 00000000000..be731b8c243 --- /dev/null +++ b/components/shared/webxr/session.rs @@ -0,0 +1,531 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::channel; +use crate::ContextId; +use crate::DeviceAPI; +use crate::Error; +use crate::Event; +use crate::Floor; +use crate::Frame; +use crate::FrameUpdateEvent; +use crate::HitTestId; +use crate::HitTestSource; +use crate::InputSource; +use crate::LayerGrandManager; +use crate::LayerId; +use crate::LayerInit; +use crate::Native; +use crate::Receiver; +use crate::Sender; +use crate::Viewport; +use crate::Viewports; + +use euclid::Point2D; +use euclid::Rect; +use euclid::RigidTransform3D; +use euclid::Size2D; + +use log::warn; + +use std::thread; +use std::time::Duration; + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +// How long to wait for an rAF. +static TIMEOUT: Duration = Duration::from_millis(5); + +/// https://www.w3.org/TR/webxr/#xrsessionmode-enum +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum SessionMode { + Inline, + ImmersiveVR, + ImmersiveAR, +} + +/// https://immersive-web.github.io/webxr/#dictdef-xrsessioninit +#[derive(Clone, Debug, Eq, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct SessionInit { + pub required_features: Vec, + pub optional_features: Vec, + /// Secondary views are enabled with the `secondary-view` feature + /// but for performance reasons we also ask users to enable this pref + /// for now. + pub first_person_observer_view: bool, +} + +impl SessionInit { + /// Helper function for validating a list of requested features against + /// a list of supported features for a given mode + pub fn validate(&self, mode: SessionMode, supported: &[String]) -> Result, Error> { + for f in &self.required_features { + // viewer and local in immersive are granted by default + // https://immersive-web.github.io/webxr/#default-features + if f == "viewer" || (f == "local" && mode != SessionMode::Inline) { + continue; + } + + if !supported.contains(f) { + return Err(Error::UnsupportedFeature(f.into())); + } + } + let mut granted = self.required_features.clone(); + for f in &self.optional_features { + if f == "viewer" + || (f == "local" && mode != SessionMode::Inline) + || supported.contains(f) + { + granted.push(f.clone()); + } + } + + Ok(granted) + } + + pub fn feature_requested(&self, f: &str) -> bool { + self.required_features + .iter() + .chain(self.optional_features.iter()) + .find(|x| *x == f) + .is_some() + } +} + +/// https://immersive-web.github.io/webxr-ar-module/#xrenvironmentblendmode-enum +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum EnvironmentBlendMode { + Opaque, + AlphaBlend, + Additive, +} + +// The messages that are sent from the content thread to the session thread. 
+#[derive(Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +enum SessionMsg { + CreateLayer(ContextId, LayerInit, Sender>), + DestroyLayer(ContextId, LayerId), + SetLayers(Vec<(ContextId, LayerId)>), + SetEventDest(Sender), + UpdateClipPlanes(/* near */ f32, /* far */ f32), + StartRenderLoop, + RenderAnimationFrame, + RequestHitTest(HitTestSource), + CancelHitTest(HitTestId), + UpdateFrameRate(f32, Sender), + Quit, + GetBoundsGeometry(Sender>>>), +} + +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +#[derive(Clone)] +pub struct Quitter { + sender: Sender, +} + +impl Quitter { + pub fn quit(&self) { + let _ = self.sender.send(SessionMsg::Quit); + } +} + +/// An object that represents an XR session. +/// This is owned by the content thread. +/// https://www.w3.org/TR/webxr/#xrsession-interface +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct Session { + floor_transform: Option>, + viewports: Viewports, + sender: Sender, + environment_blend_mode: EnvironmentBlendMode, + initial_inputs: Vec, + granted_features: Vec, + id: SessionId, + supported_frame_rates: Vec, +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))] +pub struct SessionId(pub(crate) u32); + +impl Session { + pub fn id(&self) -> SessionId { + self.id + } + + pub fn floor_transform(&self) -> Option> { + self.floor_transform.clone() + } + + pub fn reference_space_bounds(&self) -> Option>> { + let (sender, receiver) = channel().ok()?; + let _ = self.sender.send(SessionMsg::GetBoundsGeometry(sender)); + receiver.recv().ok()? + } + + pub fn initial_inputs(&self) -> &[InputSource] { + &self.initial_inputs + } + + pub fn environment_blend_mode(&self) -> EnvironmentBlendMode { + self.environment_blend_mode + } + + pub fn viewports(&self) -> &[Rect] { + &self.viewports.viewports + } + + /// A resolution large enough to contain all the viewports. + /// https://immersive-web.github.io/webxr/#recommended-webgl-framebuffer-resolution + /// + /// Returns None if the session is inline + pub fn recommended_framebuffer_resolution(&self) -> Option> { + self.viewports() + .iter() + .fold(None::>, |acc, vp| { + Some(acc.map(|a| a.union(vp)).unwrap_or(*vp)) + }) + .map(|rect| Size2D::new(rect.max_x(), rect.max_y())) + } + + pub fn create_layer(&self, context_id: ContextId, init: LayerInit) -> Result { + let (sender, receiver) = channel().map_err(|_| Error::CommunicationError)?; + let _ = self + .sender + .send(SessionMsg::CreateLayer(context_id, init, sender)); + receiver.recv().map_err(|_| Error::CommunicationError)? 
+ } + + /// Destroy a layer + pub fn destroy_layer(&self, context_id: ContextId, layer_id: LayerId) { + let _ = self + .sender + .send(SessionMsg::DestroyLayer(context_id, layer_id)); + } + + pub fn set_layers(&self, layers: Vec<(ContextId, LayerId)>) { + let _ = self.sender.send(SessionMsg::SetLayers(layers)); + } + + pub fn start_render_loop(&mut self) { + let _ = self.sender.send(SessionMsg::StartRenderLoop); + } + + pub fn update_clip_planes(&mut self, near: f32, far: f32) { + let _ = self.sender.send(SessionMsg::UpdateClipPlanes(near, far)); + } + + pub fn set_event_dest(&mut self, dest: Sender) { + let _ = self.sender.send(SessionMsg::SetEventDest(dest)); + } + + pub fn render_animation_frame(&mut self) { + let _ = self.sender.send(SessionMsg::RenderAnimationFrame); + } + + pub fn end_session(&mut self) { + let _ = self.sender.send(SessionMsg::Quit); + } + + pub fn apply_event(&mut self, event: FrameUpdateEvent) { + match event { + FrameUpdateEvent::UpdateFloorTransform(floor) => self.floor_transform = floor, + FrameUpdateEvent::UpdateViewports(vp) => self.viewports = vp, + FrameUpdateEvent::HitTestSourceAdded(_) => (), + } + } + + pub fn granted_features(&self) -> &[String] { + &self.granted_features + } + + pub fn request_hit_test(&self, source: HitTestSource) { + let _ = self.sender.send(SessionMsg::RequestHitTest(source)); + } + + pub fn cancel_hit_test(&self, id: HitTestId) { + let _ = self.sender.send(SessionMsg::CancelHitTest(id)); + } + + pub fn update_frame_rate(&mut self, rate: f32, sender: Sender) { + let _ = self.sender.send(SessionMsg::UpdateFrameRate(rate, sender)); + } + + pub fn supported_frame_rates(&self) -> &[f32] { + &self.supported_frame_rates + } +} + +#[derive(PartialEq)] +enum RenderState { + NotInRenderLoop, + InRenderLoop, + PendingQuit, +} + +/// For devices that want to do their own thread management, the `SessionThread` type is exposed. 
+pub struct SessionThread { + receiver: Receiver, + sender: Sender, + layers: Vec<(ContextId, LayerId)>, + pending_layers: Option>, + frame_count: u64, + frame_sender: Sender, + running: bool, + device: Device, + id: SessionId, + render_state: RenderState, +} + +impl SessionThread +where + Device: DeviceAPI, +{ + pub fn new( + mut device: Device, + frame_sender: Sender, + id: SessionId, + ) -> Result { + let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?; + device.set_quitter(Quitter { + sender: sender.clone(), + }); + let frame_count = 0; + let running = true; + let layers = Vec::new(); + let pending_layers = None; + Ok(SessionThread { + sender, + receiver, + device, + layers, + pending_layers, + frame_count, + frame_sender, + running, + id, + render_state: RenderState::NotInRenderLoop, + }) + } + + pub fn new_session(&mut self) -> Session { + let floor_transform = self.device.floor_transform(); + let viewports = self.device.viewports(); + let sender = self.sender.clone(); + let initial_inputs = self.device.initial_inputs(); + let environment_blend_mode = self.device.environment_blend_mode(); + let granted_features = self.device.granted_features().into(); + let supported_frame_rates = self.device.supported_frame_rates(); + Session { + floor_transform, + viewports, + sender, + initial_inputs, + environment_blend_mode, + granted_features, + id: self.id, + supported_frame_rates, + } + } + + pub fn run(&mut self) { + loop { + if let Ok(msg) = self.receiver.recv() { + if !self.handle_msg(msg) { + self.running = false; + break; + } + } else { + break; + } + } + } + + fn handle_msg(&mut self, msg: SessionMsg) -> bool { + log::debug!("processing {:?}", msg); + match msg { + SessionMsg::SetEventDest(dest) => { + self.device.set_event_dest(dest); + } + SessionMsg::RequestHitTest(source) => { + self.device.request_hit_test(source); + } + SessionMsg::CancelHitTest(id) => { + self.device.cancel_hit_test(id); + } + SessionMsg::CreateLayer(context_id, layer_init, sender) => { + let result = self.device.create_layer(context_id, layer_init); + let _ = sender.send(result); + } + SessionMsg::DestroyLayer(context_id, layer_id) => { + self.layers.retain(|&(_, other_id)| layer_id != other_id); + self.device.destroy_layer(context_id, layer_id); + } + SessionMsg::SetLayers(layers) => { + self.pending_layers = Some(layers); + } + SessionMsg::StartRenderLoop => { + if let Some(layers) = self.pending_layers.take() { + self.layers = layers; + } + let frame = match self.device.begin_animation_frame(&self.layers[..]) { + Some(frame) => frame, + None => { + warn!("Device stopped providing frames, exiting"); + return false; + } + }; + self.render_state = RenderState::InRenderLoop; + let _ = self.frame_sender.send(frame); + } + SessionMsg::UpdateClipPlanes(near, far) => self.device.update_clip_planes(near, far), + SessionMsg::RenderAnimationFrame => { + self.frame_count += 1; + + self.device.end_animation_frame(&self.layers[..]); + + if self.render_state == RenderState::PendingQuit { + self.quit(); + return false; + } + + if let Some(layers) = self.pending_layers.take() { + self.layers = layers; + } + #[allow(unused_mut)] + let mut frame = match self.device.begin_animation_frame(&self.layers[..]) { + Some(frame) => frame, + None => { + warn!("Device stopped providing frames, exiting"); + return false; + } + }; + + let _ = self.frame_sender.send(frame); + } + SessionMsg::UpdateFrameRate(rate, sender) => { + let new_framerate = self.device.update_frame_rate(rate); + let _ = 
sender.send(new_framerate); + } + SessionMsg::Quit => { + if self.render_state == RenderState::NotInRenderLoop { + self.quit(); + return false; + } else { + self.render_state = RenderState::PendingQuit; + } + } + SessionMsg::GetBoundsGeometry(sender) => { + let bounds = self.device.reference_space_bounds(); + let _ = sender.send(bounds); + } + } + true + } + + fn quit(&mut self) { + self.render_state = RenderState::NotInRenderLoop; + self.device.quit(); + } +} + +/// Devices that need to can run sessions on the main thread. +pub trait MainThreadSession: 'static { + fn run_one_frame(&mut self); + fn running(&self) -> bool; +} + +impl MainThreadSession for SessionThread +where + Device: DeviceAPI, +{ + fn run_one_frame(&mut self) { + let frame_count = self.frame_count; + while frame_count == self.frame_count && self.running { + if let Ok(msg) = crate::recv_timeout(&self.receiver, TIMEOUT) { + self.running = self.handle_msg(msg); + } else { + break; + } + } + } + + fn running(&self) -> bool { + self.running + } +} + +/// A type for building XR sessions +pub struct SessionBuilder<'a, GL> { + sessions: &'a mut Vec>, + frame_sender: Sender, + layer_grand_manager: LayerGrandManager, + id: SessionId, +} + +impl<'a, GL: 'static> SessionBuilder<'a, GL> { + pub fn id(&self) -> SessionId { + self.id + } + + pub(crate) fn new( + sessions: &'a mut Vec>, + frame_sender: Sender, + layer_grand_manager: LayerGrandManager, + id: SessionId, + ) -> Self { + SessionBuilder { + sessions, + frame_sender, + layer_grand_manager, + id, + } + } + + /// For devices which are happy to hand over thread management to webxr. + pub fn spawn(self, factory: Factory) -> Result + where + Factory: 'static + FnOnce(LayerGrandManager) -> Result + Send, + Device: DeviceAPI, + { + let (acks, ackr) = crate::channel().or(Err(Error::CommunicationError))?; + let frame_sender = self.frame_sender; + let layer_grand_manager = self.layer_grand_manager; + let id = self.id; + thread::spawn(move || { + match factory(layer_grand_manager) + .and_then(|device| SessionThread::new(device, frame_sender, id)) + { + Ok(mut thread) => { + let session = thread.new_session(); + let _ = acks.send(Ok(session)); + thread.run(); + } + Err(err) => { + let _ = acks.send(Err(err)); + } + } + }); + ackr.recv().unwrap_or(Err(Error::CommunicationError)) + } + + /// For devices that need to run on the main thread. 
+ pub fn run_on_main_thread(self, factory: Factory) -> Result + where + Factory: 'static + FnOnce(LayerGrandManager) -> Result, + Device: DeviceAPI, + { + let device = factory(self.layer_grand_manager)?; + let frame_sender = self.frame_sender; + let mut session_thread = SessionThread::new(device, frame_sender, self.id)?; + let session = session_thread.new_session(); + self.sessions.push(Box::new(session_thread)); + Ok(session) + } +} diff --git a/components/shared/webxr/space.rs b/components/shared/webxr/space.rs new file mode 100644 index 00000000000..4ab116c5b90 --- /dev/null +++ b/components/shared/webxr/space.rs @@ -0,0 +1,28 @@ +use crate::InputId; +use crate::Joint; +use euclid::RigidTransform3D; + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// A stand-in type for "the space isn't statically known since +/// it comes from client side code" +pub struct ApiSpace; + +#[derive(Clone, Copy, Debug, PartialEq)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub enum BaseSpace { + Local, + Floor, + Viewer, + BoundedFloor, + TargetRay(InputId), + Grip(InputId), + Joint(InputId, Joint), +} + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct Space { + pub base: BaseSpace, + pub offset: RigidTransform3D, +} diff --git a/components/shared/webxr/util.rs b/components/shared/webxr/util.rs new file mode 100644 index 00000000000..e6342d42faf --- /dev/null +++ b/components/shared/webxr/util.rs @@ -0,0 +1,129 @@ +use crate::FrameUpdateEvent; +use crate::HitTestId; +use crate::HitTestSource; +use euclid::Transform3D; + +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +pub struct ClipPlanes { + pub near: f32, + pub far: f32, + /// Was there an update that needs propagation to the client? + update: bool, +} + +impl Default for ClipPlanes { + fn default() -> Self { + ClipPlanes { + near: 0.1, + far: 1000., + update: false, + } + } +} + +impl ClipPlanes { + pub fn update(&mut self, near: f32, far: f32) { + self.near = near; + self.far = far; + self.update = true; + } + + /// Checks for and clears the pending update flag + pub fn recently_updated(&mut self) -> bool { + if self.update { + self.update = false; + true + } else { + false + } + } +} + +#[derive(Clone, Debug, Default)] +#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))] +/// Holds on to hit tests +pub struct HitTestList { + tests: Vec, + uncommitted_tests: Vec, +} + +impl HitTestList { + pub fn request_hit_test(&mut self, source: HitTestSource) { + self.uncommitted_tests.push(source) + } + + pub fn commit_tests(&mut self) -> Vec { + let mut events = vec![]; + for test in self.uncommitted_tests.drain(..) 
{ + events.push(FrameUpdateEvent::HitTestSourceAdded(test.id)); + self.tests.push(test); + } + events + } + + pub fn tests(&self) -> &[HitTestSource] { + &self.tests + } + + pub fn cancel_hit_test(&mut self, id: HitTestId) { + self.tests.retain(|s| s.id != id); + self.uncommitted_tests.retain(|s| s.id != id); + } +} + +#[inline] +/// Construct a projection matrix given the four angles from the center for the faces of the viewing frustum +pub fn fov_to_projection_matrix( + left: f32, + right: f32, + top: f32, + bottom: f32, + clip_planes: ClipPlanes, +) -> Transform3D { + let near = clip_planes.near; + // XXXManishearth deal with infinite planes + let left = left.tan() * near; + let right = right.tan() * near; + let top = top.tan() * near; + let bottom = bottom.tan() * near; + + frustum_to_projection_matrix(left, right, top, bottom, clip_planes) +} + +#[inline] +/// Construct matrix given the actual extent of the viewing frustum on the near plane +pub fn frustum_to_projection_matrix( + left: f32, + right: f32, + top: f32, + bottom: f32, + clip_planes: ClipPlanes, +) -> Transform3D { + let near = clip_planes.near; + let far = clip_planes.far; + + let w = right - left; + let h = top - bottom; + let d = far - near; + + // Column-major order + Transform3D::new( + 2. * near / w, + 0., + 0., + 0., + 0., + 2. * near / h, + 0., + 0., + (right + left) / w, + (top + bottom) / h, + -(far + near) / d, + -1., + 0., + 0., + -2. * far * near / d, + 0., + ) +} diff --git a/components/shared/webxr/view.rs b/components/shared/webxr/view.rs new file mode 100644 index 00000000000..566748f8a7a --- /dev/null +++ b/components/shared/webxr/view.rs @@ -0,0 +1,170 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! This crate uses `euclid`'s typed units, and exposes different coordinate spaces. 
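//!
//! (Editorial sketch, not part of the original change: each space below is a
//! distinct phantom unit, so composing transforms whose spaces do not line up
//! is a compile-time error rather than a silent runtime bug. For example:)
//!
//! ```ignore
//! fn compose(
//!     eye_to_viewer: RigidTransform3D<f32, LeftEye, Viewer>,
//!     viewer_to_native: RigidTransform3D<f32, Viewer, Native>,
//! ) -> RigidTransform3D<f32, LeftEye, Native> {
//!     // Composes because the `Viewer` spaces agree; swapping the operands
//!     // of `then` would not type-check.
//!     eye_to_viewer.then(&viewer_to_native)
//! }
//! ```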
+ +use euclid::Rect; +use euclid::RigidTransform3D; +use euclid::Transform3D; + +#[cfg(feature = "ipc")] +use serde::{Deserialize, Serialize}; + +use std::marker::PhantomData; + +/// The coordinate space of the viewer +/// https://immersive-web.github.io/webxr/#dom-xrreferencespacetype-viewer +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Viewer {} + +/// The coordinate space of the floor +/// https://immersive-web.github.io/webxr/#dom-xrreferencespacetype-local-floor +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Floor {} + +/// The coordinate space of the left eye +/// https://immersive-web.github.io/webxr/#dom-xreye-left +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum LeftEye {} + +/// The coordinate space of the right eye +/// https://immersive-web.github.io/webxr/#dom-xreye-right +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum RightEye {} + +/// The coordinate space of the left frustrum of a cubemap +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum CubeLeft {} + +/// The coordinate space of the right frustrum of a cubemap +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum CubeRight {} + +/// The coordinate space of the top frustrum of a cubemap +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum CubeTop {} + +/// The coordinate space of the bottom frustrum of a cubemap +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum CubeBottom {} + +/// The coordinate space of the back frustrum of a cubemap +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum CubeBack {} + +/// Pattern-match on eyes +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct SomeEye(u8, PhantomData); +pub const LEFT_EYE: SomeEye = SomeEye(0, PhantomData); +pub const RIGHT_EYE: SomeEye = SomeEye(1, PhantomData); +pub const VIEWER: SomeEye = SomeEye(2, PhantomData); +pub const CUBE_LEFT: SomeEye = SomeEye(3, PhantomData); +pub const CUBE_RIGHT: SomeEye = SomeEye(4, PhantomData); +pub const CUBE_TOP: SomeEye = SomeEye(5, PhantomData); +pub const CUBE_BOTTOM: SomeEye = SomeEye(6, PhantomData); +pub const CUBE_BACK: SomeEye = SomeEye(7, PhantomData); + +impl PartialEq> for SomeEye { + fn eq(&self, rhs: &SomeEye) -> bool { + self.0 == rhs.0 + } +} + +/// The native 3D coordinate space of the device +/// This is not part of the webvr specification. +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Native {} + +/// The normalized device coordinate space, where the display +/// is from (-1,-1) to (1,1). +// TODO: are we OK assuming that we can use the same coordinate system for all displays? +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Display {} + +/// The unnormalized device coordinate space, where the display +/// is from (0,0) to (w,h), measured in pixels. +// TODO: are we OK assuming that we can use the same coordinate system for all displays? 
+#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Viewport {} + +/// The coordinate space of an input device +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Input {} + +/// The coordinate space of a secondary capture view +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Capture {} + +/// For each eye, the pose of that eye, +/// its projection onto its display. +/// For stereo displays, we have a `View` and a `View`. +/// For mono displays, we hagve a `View` +/// https://immersive-web.github.io/webxr/#xrview +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct View { + pub transform: RigidTransform3D, + pub projection: Transform3D, +} + +impl Default for View { + fn default() -> Self { + View { + transform: RigidTransform3D::identity(), + projection: Transform3D::identity(), + } + } +} + +impl View { + pub fn cast_unit(&self) -> View { + View { + transform: self.transform.cast_unit(), + projection: Transform3D::from_untyped(&self.projection.to_untyped()), + } + } +} + +/// Whether a device is mono or stereo, and the views it supports. +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub enum Views { + /// Mono view for inline VR, viewport and projection matrices are calculated by client + Inline, + Mono(View), + Stereo(View, View), + StereoCapture(View, View, View), + Cubemap( + View, + View, + View, + View, + View, + View, + ), +} + +/// A list of viewports per-eye in the order of fields in Views. +/// +/// Not all must be in active use. +#[derive(Clone, Debug)] +#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))] +pub struct Viewports { + pub viewports: Vec>, +} diff --git a/components/webxr/Cargo.toml b/components/webxr/Cargo.toml new file mode 100644 index 00000000000..23c7c4b85c7 --- /dev/null +++ b/components/webxr/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "webxr" +version = "0.0.1" +authors = ["The Servo Project Developers"] +edition = "2018" + +homepage = "https://github.com/servo/webxr" +repository = "https://github.com/servo/webxr" +keywords = ["ar", "headset", "openxr", "vr", "webxr"] +license = "MPL-2.0" + +description = '''A safe Rust API that provides a way to interact with +virtual reality and augmented reality devices and integration with OpenXR. 
+The API is inspired by the WebXR Device API (https://www.w3.org/TR/webxr/) +but adapted to Rust design patterns.''' + +[lib] +path = "lib.rs" + +[features] +default = ["x11"] +x11 = ["surfman/sm-x11"] +angle = ["surfman/sm-angle"] +glwindow = [] +headless = [] +ipc = ["webxr-api/ipc", "serde"] +openxr-api = ["angle", "openxr", "winapi", "wio", "surfman/sm-angle-default"] + +[dependencies] +webxr-api = { path = "../shared/webxr" } +crossbeam-channel = "0.5" +euclid = "0.22" +log = "0.4.6" +openxr = { version = "0.19", optional = true } +serde = { version = "1.0", optional = true } +glow = "0.16" +raw-window-handle = "0.6" +surfman = { git = "https://github.com/servo/surfman", rev = "300789ddbda45c89e9165c31118bf1c4c07f89f6", features = [ + "chains", + "sm-raw-window-handle-06", +] } + +[target.'cfg(target_os = "windows")'.dependencies] +winapi = { version = "0.3", features = [ + "dxgi", + "d3d11", + "winerror", +], optional = true } +wio = { version = "0.2", optional = true } diff --git a/components/webxr/gl_utils.rs b/components/webxr/gl_utils.rs new file mode 100644 index 00000000000..fff74017c8d --- /dev/null +++ b/components/webxr/gl_utils.rs @@ -0,0 +1,190 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::SurfmanGL; +use glow as gl; +use glow::Context as Gl; +use glow::HasContext; +use std::collections::HashMap; +use std::num::NonZero; +use surfman::Device as SurfmanDevice; +use webxr_api::ContextId; +use webxr_api::GLContexts; +use webxr_api::LayerId; + +pub(crate) fn framebuffer(framebuffer: u32) -> Option { + NonZero::new(framebuffer).map(gl::NativeFramebuffer) +} + +// A utility to clear a color texture and optional depth/stencil texture +pub(crate) struct GlClearer { + fbos: HashMap< + ( + LayerId, + Option, + Option, + ), + Option, + >, + should_reverse_winding: bool, +} + +impl GlClearer { + pub(crate) fn new(should_reverse_winding: bool) -> GlClearer { + let fbos = HashMap::new(); + GlClearer { + fbos, + should_reverse_winding, + } + } + + fn fbo( + &mut self, + gl: &Gl, + layer_id: LayerId, + color: Option, + color_target: u32, + depth_stencil: Option, + ) -> Option { + let should_reverse_winding = self.should_reverse_winding; + *self + .fbos + .entry((layer_id, color, depth_stencil)) + .or_insert_with(|| { + // Save the current GL state + let mut bound_fbos = [0, 0]; + unsafe { + gl.get_parameter_i32_slice(gl::DRAW_FRAMEBUFFER_BINDING, &mut bound_fbos[0..]); + gl.get_parameter_i32_slice(gl::READ_FRAMEBUFFER_BINDING, &mut bound_fbos[1..]); + + // Generate and set attachments of a new FBO + let fbo = gl.create_framebuffer().ok(); + + gl.bind_framebuffer(gl::FRAMEBUFFER, fbo); + gl.framebuffer_texture_2d( + gl::FRAMEBUFFER, + gl::COLOR_ATTACHMENT0, + color_target, + color, + 0, + ); + gl.framebuffer_texture_2d( + gl::FRAMEBUFFER, + gl::DEPTH_STENCIL_ATTACHMENT, + gl::TEXTURE_2D, + depth_stencil, + 0, + ); + + // Necessary if using an OpenXR runtime that does not support mutable FOV, + // as flipping the projection matrix necessitates reversing the winding order. 
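                // (Editorial note: OpenGL's default front face is
                // counter-clockwise, so switching it to `gl::CW` here keeps the
                // vertically flipped geometry front-facing for culling.)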
+ if should_reverse_winding { + gl.front_face(gl::CW); + } + + // Restore the GL state + gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, framebuffer(bound_fbos[0] as _)); + gl.bind_framebuffer(gl::READ_FRAMEBUFFER, framebuffer(bound_fbos[1] as _)); + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + + fbo + } + }) + } + + pub(crate) fn clear( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + layer_id: LayerId, + color: Option, + color_target: u32, + depth_stencil: Option, + ) { + let gl = match contexts.bindings(device, context_id) { + None => return, + Some(gl) => gl, + }; + let fbo = self.fbo(gl, layer_id, color, color_target, depth_stencil); + unsafe { + // Save the current GL state + let mut bound_fbos = [0, 0]; + let mut clear_color = [0., 0., 0., 0.]; + let mut clear_depth = [0.]; + let mut clear_stencil = [0]; + let color_mask; + let depth_mask; + let mut stencil_mask = [0]; + let scissor_enabled = gl.is_enabled(gl::SCISSOR_TEST); + let rasterizer_enabled = gl.is_enabled(gl::RASTERIZER_DISCARD); + + gl.get_parameter_i32_slice(gl::DRAW_FRAMEBUFFER_BINDING, &mut bound_fbos[0..]); + gl.get_parameter_i32_slice(gl::READ_FRAMEBUFFER_BINDING, &mut bound_fbos[1..]); + gl.get_parameter_f32_slice(gl::COLOR_CLEAR_VALUE, &mut clear_color[..]); + gl.get_parameter_f32_slice(gl::DEPTH_CLEAR_VALUE, &mut clear_depth[..]); + gl.get_parameter_i32_slice(gl::STENCIL_CLEAR_VALUE, &mut clear_stencil[..]); + depth_mask = gl.get_parameter_bool(gl::DEPTH_WRITEMASK); + gl.get_parameter_i32_slice(gl::STENCIL_WRITEMASK, &mut stencil_mask[..]); + color_mask = gl.get_parameter_bool_array::<4>(gl::COLOR_WRITEMASK); + + // Clear it + gl.bind_framebuffer(gl::FRAMEBUFFER, fbo); + gl.clear_color(0., 0., 0., 1.); + gl.clear_depth(1.); + gl.clear_stencil(0); + gl.disable(gl::SCISSOR_TEST); + gl.disable(gl::RASTERIZER_DISCARD); + gl.depth_mask(true); + gl.stencil_mask(0xFFFFFFFF); + gl.color_mask(true, true, true, true); + gl.clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT | gl::STENCIL_BUFFER_BIT); + + // Restore the GL state + gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, framebuffer(bound_fbos[0] as _)); + gl.bind_framebuffer(gl::READ_FRAMEBUFFER, framebuffer(bound_fbos[1] as _)); + gl.clear_color( + clear_color[0], + clear_color[1], + clear_color[2], + clear_color[3], + ); + gl.color_mask(color_mask[0], color_mask[1], color_mask[2], color_mask[3]); + gl.clear_depth(clear_depth[0] as f64); + gl.clear_stencil(clear_stencil[0]); + gl.depth_mask(depth_mask); + gl.stencil_mask(stencil_mask[0] as _); + if scissor_enabled { + gl.enable(gl::SCISSOR_TEST); + } + if rasterizer_enabled { + gl.enable(gl::RASTERIZER_DISCARD); + } + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + } + } + + pub(crate) fn destroy_layer( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + layer_id: LayerId, + ) { + let gl = match contexts.bindings(device, context_id) { + None => return, + Some(gl) => gl, + }; + self.fbos.retain(|&(other_id, _, _), &mut fbo| { + if layer_id != other_id { + true + } else { + if let Some(fbo) = fbo { + unsafe { gl.delete_framebuffer(fbo) }; + } + false + } + }) + } +} diff --git a/components/webxr/glwindow/mod.rs b/components/webxr/glwindow/mod.rs new file mode 100644 index 00000000000..9f24c85f14e --- /dev/null +++ b/components/webxr/glwindow/mod.rs @@ -0,0 +1,877 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::gl_utils::framebuffer; +use crate::{SurfmanGL, SurfmanLayerManager}; +use core::slice; +use euclid::{ + Angle, Point2D, Rect, RigidTransform3D, Rotation3D, Size2D, Transform3D, UnknownUnit, Vector3D, +}; +use glow::{self as gl, Context as Gl, HasContext}; +use raw_window_handle::DisplayHandle; +use std::num::NonZeroU32; +use std::rc::Rc; +use surfman::chains::{PreserveBuffer, SwapChain, SwapChainAPI, SwapChains, SwapChainsAPI}; +use surfman::{ + Adapter, Connection, Context as SurfmanContext, ContextAttributeFlags, ContextAttributes, + Device as SurfmanDevice, GLApi, GLVersion, NativeWidget, SurfaceAccess, SurfaceType, +}; +use webxr_api::util::ClipPlanes; +use webxr_api::{ + ContextId, DeviceAPI, DiscoveryAPI, Display, Error, Event, EventBuffer, Floor, Frame, + InputSource, LayerGrandManager, LayerId, LayerInit, LayerManager, Native, Quitter, Sender, + Session, SessionBuilder, SessionInit, SessionMode, SomeEye, View, Viewer, ViewerPose, Viewport, + Viewports, Views, CUBE_BACK, CUBE_BOTTOM, CUBE_LEFT, CUBE_RIGHT, CUBE_TOP, LEFT_EYE, RIGHT_EYE, + VIEWER, +}; + +// How far off the ground are the viewer's eyes? +const HEIGHT: f32 = 1.0; + +// What is half the vertical field of view? +const FOV_UP: f32 = 45.0; + +// Some guesstimated numbers, hopefully it doesn't matter if these are off by a bit. + +// What the distance between the viewer's eyes? +const INTER_PUPILLARY_DISTANCE: f32 = 0.06; + +// What is the size of a pixel? +const PIXELS_PER_METRE: f32 = 6000.0; + +pub trait GlWindow { + fn get_render_target( + &self, + device: &mut SurfmanDevice, + context: &mut SurfmanContext, + ) -> GlWindowRenderTarget; + fn get_rotation(&self) -> Rotation3D; + fn get_translation(&self) -> Vector3D; + + fn get_mode(&self) -> GlWindowMode { + GlWindowMode::Blit + } + fn display_handle(&self) -> DisplayHandle; +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum GlWindowMode { + Blit, + StereoLeftRight, + StereoRedCyan, + Cubemap, + Spherical, +} + +pub enum GlWindowRenderTarget { + NativeWidget(NativeWidget), + SwapChain(SwapChain), +} + +pub struct GlWindowDiscovery { + connection: Connection, + adapter: Adapter, + context_attributes: ContextAttributes, + window: Rc, +} + +impl GlWindowDiscovery { + pub fn new(window: Rc) -> GlWindowDiscovery { + let connection = Connection::from_display_handle(window.display_handle()).unwrap(); + let adapter = connection.create_adapter().unwrap(); + let flags = ContextAttributeFlags::ALPHA + | ContextAttributeFlags::DEPTH + | ContextAttributeFlags::STENCIL; + let version = match connection.gl_api() { + GLApi::GLES => GLVersion { major: 3, minor: 0 }, + GLApi::GL => GLVersion { major: 3, minor: 2 }, + }; + let context_attributes = ContextAttributes { flags, version }; + GlWindowDiscovery { + connection, + adapter, + context_attributes, + window, + } + } +} + +impl DiscoveryAPI for GlWindowDiscovery { + fn request_session( + &mut self, + mode: SessionMode, + init: &SessionInit, + xr: SessionBuilder, + ) -> Result { + if self.supports_session(mode) { + let granted_features = init.validate(mode, &["local-floor".into()])?; + let connection = self.connection.clone(); + let adapter = self.adapter.clone(); + let context_attributes = self.context_attributes.clone(); + let window = self.window.clone(); + xr.run_on_main_thread(move |grand_manager| { + GlWindowDevice::new( + connection, + adapter, + context_attributes, + window, + 
granted_features, + grand_manager, + ) + }) + } else { + Err(Error::NoMatchingDevice) + } + } + + fn supports_session(&self, mode: SessionMode) -> bool { + mode == SessionMode::ImmersiveVR || mode == SessionMode::ImmersiveAR + } +} + +pub struct GlWindowDevice { + device: SurfmanDevice, + context: SurfmanContext, + gl: Rc, + window: Rc, + grand_manager: LayerGrandManager, + layer_manager: Option, + target_swap_chain: Option>, + swap_chains: SwapChains, + read_fbo: Option, + events: EventBuffer, + clip_planes: ClipPlanes, + granted_features: Vec, + shader: Option, +} + +impl DeviceAPI for GlWindowDevice { + fn floor_transform(&self) -> Option> { + let translation = Vector3D::new(0.0, HEIGHT, 0.0); + Some(RigidTransform3D::from_translation(translation)) + } + + fn viewports(&self) -> Viewports { + let size = self.viewport_size(); + let viewports = match self.window.get_mode() { + GlWindowMode::Cubemap | GlWindowMode::Spherical => vec![ + Rect::new(Point2D::new(size.width * 1, size.height * 1), size), + Rect::new(Point2D::new(size.width * 0, size.height * 1), size), + Rect::new(Point2D::new(size.width * 2, size.height * 1), size), + Rect::new(Point2D::new(size.width * 2, size.height * 0), size), + Rect::new(Point2D::new(size.width * 0, size.height * 0), size), + Rect::new(Point2D::new(size.width * 1, size.height * 0), size), + ], + GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => { + vec![ + Rect::new(Point2D::default(), size), + Rect::new(Point2D::new(size.width, 0), size), + ] + } + }; + Viewports { viewports } + } + + fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result { + self.layer_manager()?.create_layer(context_id, init) + } + + fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) { + self.layer_manager() + .unwrap() + .destroy_layer(context_id, layer_id) + } + + fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option { + log::debug!("Begin animation frame for layers {:?}", layers); + let translation = Vector3D::from_untyped(self.window.get_translation()); + let translation: RigidTransform3D<_, _, Native> = + RigidTransform3D::from_translation(translation); + let rotation = Rotation3D::from_untyped(&self.window.get_rotation()); + let rotation = RigidTransform3D::from_rotation(rotation); + let transform = translation.then(&rotation); + let sub_images = self.layer_manager().ok()?.begin_frame(layers).ok()?; + Some(Frame { + pose: Some(ViewerPose { + transform, + views: self.views(transform), + }), + inputs: vec![], + events: vec![], + sub_images, + hit_test_results: vec![], + predicted_display_time: 0.0, + }) + } + + fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) { + log::debug!("End animation frame for layers {:?}", layers); + self.device.make_context_current(&self.context).unwrap(); + debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR); + + let _ = self.layer_manager().unwrap().end_frame(layers); + + let window_size = self.window_size(); + let viewport_size = self.viewport_size(); + + let framebuffer_object = self + .device + .context_surface_info(&self.context) + .unwrap() + .map(|info| info.framebuffer_object) + .unwrap_or(0); + unsafe { + self.gl + .bind_framebuffer(gl::FRAMEBUFFER, framebuffer(framebuffer_object)); + debug_assert_eq!( + ( + self.gl.get_error(), + self.gl.check_framebuffer_status(gl::FRAMEBUFFER) + ), + (gl::NO_ERROR, gl::FRAMEBUFFER_COMPLETE) + ); + + self.gl.clear_color(0.0, 0.0, 0.0, 0.0); + self.gl.clear(gl::COLOR_BUFFER_BIT); + 
debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR); + } + + for &(_, layer_id) in layers { + let swap_chain = match self.swap_chains.get(layer_id) { + Some(swap_chain) => swap_chain, + None => continue, + }; + let surface = match swap_chain.take_surface() { + Some(surface) => surface, + None => return, + }; + let texture_size = self.device.surface_info(&surface).size; + let surface_texture = self + .device + .create_surface_texture(&mut self.context, surface) + .unwrap(); + let raw_texture_id = self.device.surface_texture_object(&surface_texture); + let texture_id = NonZeroU32::new(raw_texture_id).map(gl::NativeTexture); + let texture_target = self.device.surface_gl_texture_target(); + log::debug!("Presenting texture {}", raw_texture_id); + + if let Some(ref shader) = self.shader { + shader.draw_texture( + texture_id, + texture_target, + texture_size, + viewport_size, + window_size, + ); + } else { + self.blit_texture(texture_id, texture_target, texture_size, window_size); + } + debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR); + + let surface = self + .device + .destroy_surface_texture(&mut self.context, surface_texture) + .unwrap(); + swap_chain.recycle_surface(surface); + } + + match self.target_swap_chain.as_ref() { + Some(target_swap_chain) => { + // Rendering to a surfman swap chain + target_swap_chain + .swap_buffers(&mut self.device, &mut self.context, PreserveBuffer::No) + .unwrap(); + } + None => { + // Rendering to a native widget + let mut surface = self + .device + .unbind_surface_from_context(&mut self.context) + .unwrap() + .unwrap(); + self.device + .present_surface(&self.context, &mut surface) + .unwrap(); + self.device + .bind_surface_to_context(&mut self.context, surface) + .unwrap(); + } + } + + debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR); + } + + fn initial_inputs(&self) -> Vec { + vec![] + } + + fn set_event_dest(&mut self, dest: Sender) { + self.events.upgrade(dest) + } + + fn quit(&mut self) { + self.events.callback(Event::SessionEnd); + } + + fn set_quitter(&mut self, _: Quitter) { + // Glwindow currently doesn't have any way to end its own session + // XXXManishearth add something for this that listens for the window + // being closed + } + + fn update_clip_planes(&mut self, near: f32, far: f32) { + self.clip_planes.update(near, far) + } + + fn granted_features(&self) -> &[String] { + &self.granted_features + } +} + +impl Drop for GlWindowDevice { + fn drop(&mut self) { + if let Some(read_fbo) = self.read_fbo { + unsafe { + self.gl.delete_framebuffer(read_fbo); + } + } + let _ = self.device.destroy_context(&mut self.context); + } +} + +impl GlWindowDevice { + fn new( + connection: Connection, + adapter: Adapter, + context_attributes: ContextAttributes, + window: Rc, + granted_features: Vec, + grand_manager: LayerGrandManager, + ) -> Result { + let mut device = connection.create_device(&adapter).unwrap(); + let context_descriptor = device + .create_context_descriptor(&context_attributes) + .unwrap(); + let mut context = device.create_context(&context_descriptor, None).unwrap(); + device.make_context_current(&context).unwrap(); + + let gl = Rc::new(unsafe { + match device.gl_api() { + GLApi::GL => Gl::from_loader_function(|symbol_name| { + device.get_proc_address(&context, symbol_name) + }), + GLApi::GLES => Gl::from_loader_function(|symbol_name| { + device.get_proc_address(&context, symbol_name) + }), + } + }); + + let target_swap_chain = match window.get_render_target(&mut device, &mut context) { + 
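            // (Editorial note: a `NativeWidget` target gets a widget surface
            // created and bound to the context so frames present directly to the
            // window, while a `SwapChain` target leaves presentation to the
            // embedder-owned swap chain.)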
GlWindowRenderTarget::NativeWidget(native_widget) => { + let surface_type = SurfaceType::Widget { native_widget }; + let surface = device + .create_surface(&context, SurfaceAccess::GPUOnly, surface_type) + .unwrap(); + device + .bind_surface_to_context(&mut context, surface) + .unwrap(); + None + } + GlWindowRenderTarget::SwapChain(target_swap_chain) => { + debug_assert!(target_swap_chain.is_attached()); + Some(target_swap_chain) + } + }; + + let read_fbo = unsafe { gl.create_framebuffer().ok() }; + unsafe { + let framebuffer_object = device + .context_surface_info(&context) + .unwrap() + .map(|info| info.framebuffer_object) + .unwrap_or(0); + gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer(framebuffer_object)); + debug_assert_eq!( + (gl.get_error(), gl.check_framebuffer_status(gl::FRAMEBUFFER)), + (gl::NO_ERROR, gl::FRAMEBUFFER_COMPLETE) + ); + + gl.enable(gl::BLEND); + gl.blend_func_separate( + gl::SRC_ALPHA, + gl::ONE_MINUS_SRC_ALPHA, + gl::ONE, + gl::ONE_MINUS_SRC_ALPHA, + ); + } + + let swap_chains = SwapChains::new(); + let layer_manager = None; + + let shader = GlWindowShader::new(gl.clone(), window.get_mode()); + debug_assert_eq!(unsafe { gl.get_error() }, gl::NO_ERROR); + + Ok(GlWindowDevice { + gl, + window, + device, + context, + read_fbo, + swap_chains, + target_swap_chain, + grand_manager, + layer_manager, + events: Default::default(), + clip_planes: Default::default(), + granted_features, + shader, + }) + } + + fn blit_texture( + &self, + texture_id: Option, + texture_target: u32, + texture_size: Size2D, + window_size: Size2D, + ) { + unsafe { + self.gl + .bind_framebuffer(gl::READ_FRAMEBUFFER, self.read_fbo); + self.gl.framebuffer_texture_2d( + gl::READ_FRAMEBUFFER, + gl::COLOR_ATTACHMENT0, + texture_target, + texture_id, + 0, + ); + self.gl.blit_framebuffer( + 0, + 0, + texture_size.width, + texture_size.height, + 0, + 0, + window_size.width, + window_size.height, + gl::COLOR_BUFFER_BIT, + gl::NEAREST, + ); + } + } + + fn layer_manager(&mut self) -> Result<&mut LayerManager, Error> { + if let Some(ref mut manager) = self.layer_manager { + return Ok(manager); + } + let swap_chains = self.swap_chains.clone(); + let viewports = self.viewports(); + let layer_manager = self.grand_manager.create_layer_manager(move |_, _| { + Ok(SurfmanLayerManager::new(viewports, swap_chains)) + })?; + self.layer_manager = Some(layer_manager); + Ok(self.layer_manager.as_mut().unwrap()) + } + + fn window_size(&self) -> Size2D { + let window_size = self + .device + .context_surface_info(&self.context) + .unwrap() + .unwrap() + .size + .to_i32(); + Size2D::from_untyped(window_size) + } + + fn viewport_size(&self) -> Size2D { + let window_size = self.window_size(); + match self.window.get_mode() { + GlWindowMode::StereoRedCyan => { + // This device has a slightly odd characteristic, which is that anaglyphic stereo + // renders both eyes to the same surface. If we want the two eyes to be parallel, + // and to agree at distance infinity, this means gettng the XR content to render some + // wasted pixels, which are stripped off when we render to the target surface. + // (The wasted pixels are on the right of the left eye and vice versa.) 
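                // (Editorial note: with the constants above this division is
                // 0.06 / 6000.0, roughly 1e-5, which truncates to 0 when cast to
                // i32; converting an eye separation in metres to pixels would
                // normally multiply by PIXELS_PER_METRE, giving 360 pixels.)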
+ let wasted_pixels = (INTER_PUPILLARY_DISTANCE / PIXELS_PER_METRE) as i32; + Size2D::new(window_size.width + wasted_pixels, window_size.height) + } + GlWindowMode::Cubemap => { + // Cubemap viewports should be square + let size = 1.max(window_size.width / 3).max(window_size.height / 2); + Size2D::new(size, size) + } + GlWindowMode::Spherical => { + // Cubemap viewports should be square + let size = 1.max(window_size.width / 2).max(window_size.height); + Size2D::new(size, size) + } + GlWindowMode::StereoLeftRight | GlWindowMode::Blit => { + Size2D::new(window_size.width / 2, window_size.height) + } + } + } + + fn views(&self, viewer: RigidTransform3D) -> Views { + match self.window.get_mode() { + GlWindowMode::Cubemap | GlWindowMode::Spherical => Views::Cubemap( + self.view(viewer, VIEWER), + self.view(viewer, CUBE_LEFT), + self.view(viewer, CUBE_RIGHT), + self.view(viewer, CUBE_TOP), + self.view(viewer, CUBE_BOTTOM), + self.view(viewer, CUBE_BACK), + ), + GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => { + Views::Stereo(self.view(viewer, LEFT_EYE), self.view(viewer, RIGHT_EYE)) + } + } + } + + fn view( + &self, + viewer: RigidTransform3D, + eye: SomeEye, + ) -> View { + let projection = self.perspective(); + let translation = if eye == RIGHT_EYE { + Vector3D::new(-INTER_PUPILLARY_DISTANCE / 2.0, 0.0, 0.0) + } else if eye == LEFT_EYE { + Vector3D::new(INTER_PUPILLARY_DISTANCE / 2.0, 0.0, 0.0) + } else { + Vector3D::zero() + }; + let rotation = if eye == CUBE_TOP { + Rotation3D::euler( + Angle::degrees(270.0), + Angle::degrees(0.0), + Angle::degrees(90.0), + ) + } else if eye == CUBE_BOTTOM { + Rotation3D::euler( + Angle::degrees(90.0), + Angle::degrees(0.0), + Angle::degrees(90.0), + ) + } else if eye == CUBE_LEFT { + Rotation3D::around_y(Angle::degrees(-90.0)) + } else if eye == CUBE_RIGHT { + Rotation3D::around_y(Angle::degrees(90.0)) + } else if eye == CUBE_BACK { + Rotation3D::euler( + Angle::degrees(180.0), + Angle::degrees(0.0), + Angle::degrees(90.0), + ) + } else { + Rotation3D::identity() + }; + let transform: RigidTransform3D = + RigidTransform3D::new(rotation, translation); + View { + transform: transform.inverse().then(&viewer), + projection, + } + } + + fn perspective(&self) -> Transform3D { + let near = self.clip_planes.near; + let far = self.clip_planes.far; + // https://github.com/toji/gl-matrix/blob/bd3307196563fbb331b40fc6ebecbbfcc2a4722c/src/mat4.js#L1271 + let fov_up = match self.window.get_mode() { + GlWindowMode::Spherical | GlWindowMode::Cubemap => Angle::degrees(45.0), + GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => { + Angle::degrees(FOV_UP) + } + }; + let f = 1.0 / fov_up.radians.tan(); + let nf = 1.0 / (near - far); + let viewport_size = self.viewport_size(); + let aspect = viewport_size.width as f32 / viewport_size.height as f32; + + // Dear rustfmt, This is a 4x4 matrix, please leave it alone. Best, ajeffrey. 
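        // (Editorial note: this follows gl-matrix's perspective(): with
        // f = 1/tan(fov_up) and nf = 1/(near - far), the -1.0 entry below makes
        // the homogeneous w component equal to -z, producing the perspective
        // divide.)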
+ { + #[rustfmt::skip] + // Sigh, row-major vs column-major + return Transform3D::new( + f / aspect, 0.0, 0.0, 0.0, + 0.0, f, 0.0, 0.0, + 0.0, 0.0, (far + near) * nf, -1.0, + 0.0, 0.0, 2.0 * far * near * nf, 0.0, + ); + } + } +} + +struct GlWindowShader { + gl: Rc, + buffer: Option, + vao: Option, + program: gl::NativeProgram, + mode: GlWindowMode, +} + +const VERTEX_ATTRIBUTE: u32 = 0; +const VERTICES: &[[f32; 2]; 4] = &[[-1.0, -1.0], [-1.0, 1.0], [1.0, -1.0], [1.0, 1.0]]; + +const PASSTHROUGH_VERTEX_SHADER: &str = " + #version 330 core + layout(location=0) in vec2 coord; + out vec2 vTexCoord; + void main(void) { + gl_Position = vec4(coord, 0.0, 1.0); + vTexCoord = coord * 0.5 + 0.5; + } +"; + +const PASSTHROUGH_FRAGMENT_SHADER: &str = " + #version 330 core + layout(location=0) out vec4 color; + uniform sampler2D image; + in vec2 vTexCoord; + void main() { + color = texture(image, vTexCoord); + } +"; + +const ANAGLYPH_VERTEX_SHADER: &str = " + #version 330 core + layout(location=0) in vec2 coord; + uniform float wasted; // What fraction of the image is wasted? + out vec2 left_coord; + out vec2 right_coord; + void main(void) { + gl_Position = vec4(coord, 0.0, 1.0); + vec2 coordn = coord * 0.5 + 0.5; + left_coord = vec2(mix(wasted/2, 0.5, coordn.x), coordn.y); + right_coord = vec2(mix(0.5, 1-wasted/2, coordn.x), coordn.y); + } +"; + +const ANAGLYPH_RED_CYAN_FRAGMENT_SHADER: &str = " + #version 330 core + layout(location=0) out vec4 color; + uniform sampler2D image; + in vec2 left_coord; + in vec2 right_coord; + void main() { + vec4 left_color = texture(image, left_coord); + vec4 right_color = texture(image, right_coord); + float red = left_color.x; + float green = right_color.y; + float blue = right_color.z; + color = vec4(red, green, blue, 1.0); + } +"; + +const SPHERICAL_VERTEX_SHADER: &str = " + #version 330 core + layout(location=0) in vec2 coord; + out vec2 lon_lat; + const float PI = 3.141592654; + void main(void) { + lon_lat = coord * vec2(PI, 0.5*PI); + gl_Position = vec4(coord, 0.0, 1.0); + } +"; + +const SPHERICAL_FRAGMENT_SHADER: &str = " + #version 330 core + layout(location=0) out vec4 color; + uniform sampler2D image; + in vec2 lon_lat; + void main() { + vec3 direction = vec3( + sin(lon_lat.x)*cos(lon_lat.y), + sin(lon_lat.y), + cos(lon_lat.x)*cos(lon_lat.y) + ); + vec2 vTexCoord; + if ((direction.y > abs(direction.x)) && (direction.y > abs(direction.z))) { + // Looking up + vTexCoord.x = direction.z / (direction.y*6.0) + 5.0/6.0; + vTexCoord.y = direction.x / (direction.y*4.0) + 1.0/4.0; + } else if ((direction.y < -abs(direction.x)) && (direction.y < -abs(direction.z))) { + // Looking down + vTexCoord.x = direction.z / (direction.y*6.0) + 1.0/6.0; + vTexCoord.y = -direction.x / (direction.y*4.0) + 1.0/4.0; + } else if (direction.z < -abs(direction.x)) { + // Looking back + vTexCoord.x = -direction.y / (direction.z*6.0) + 3.0/6.0; + vTexCoord.y = -direction.x / (direction.z*4.0) + 1.0/4.0; + } else if (direction.x < -abs(direction.z)) { + // Looking left + vTexCoord.x = -direction.z / (direction.x*6.0) + 1.0/6.0; + vTexCoord.y = -direction.y / (direction.x*4.0) + 3.0/4.0; + } else if (direction.x > abs(direction.z)) { + // Looking right + vTexCoord.x = -direction.z / (direction.x*6.0) + 5.0/6.0; + vTexCoord.y = direction.y / (direction.x*4.0) + 3.0/4.0; + } else { + // Looking ahead + vTexCoord.x = direction.x / (direction.z*6.0) + 3.0/6.0; + vTexCoord.y = direction.y / (direction.z*4.0) + 3.0/4.0; + } + color = texture(image, vTexCoord); + } +"; + +impl GlWindowShader { + fn 
new(gl: Rc, mode: GlWindowMode) -> Option { + // The shader source + let (vertex_source, fragment_source) = match mode { + GlWindowMode::Blit => { + return None; + } + GlWindowMode::StereoLeftRight | GlWindowMode::Cubemap => { + (PASSTHROUGH_VERTEX_SHADER, PASSTHROUGH_FRAGMENT_SHADER) + } + GlWindowMode::StereoRedCyan => { + (ANAGLYPH_VERTEX_SHADER, ANAGLYPH_RED_CYAN_FRAGMENT_SHADER) + } + GlWindowMode::Spherical => (SPHERICAL_VERTEX_SHADER, SPHERICAL_FRAGMENT_SHADER), + }; + + // TODO: work out why shaders don't work on macos + if cfg!(target_os = "macos") { + log::warn!("XR shaders may not render on MacOS."); + } + + unsafe { + // The four corners of the window in a VAO, set to attribute 0 + let buffer = gl.create_buffer().ok(); + let vao = gl.create_vertex_array().ok(); + gl.bind_buffer(gl::ARRAY_BUFFER, buffer); + + let data = + slice::from_raw_parts(VERTICES as *const _ as _, std::mem::size_of_val(VERTICES)); + gl.buffer_data_u8_slice(gl::ARRAY_BUFFER, data, gl::STATIC_DRAW); + + gl.bind_vertex_array(vao); + gl.vertex_attrib_pointer_f32( + VERTEX_ATTRIBUTE, + VERTICES[0].len() as i32, + gl::FLOAT, + false, + 0, + 0, + ); + gl.enable_vertex_attrib_array(VERTEX_ATTRIBUTE); + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + + // The shader program + let program = gl.create_program().unwrap(); + let vertex_shader = gl.create_shader(gl::VERTEX_SHADER).unwrap(); + let fragment_shader = gl.create_shader(gl::FRAGMENT_SHADER).unwrap(); + gl.shader_source(vertex_shader, vertex_source); + gl.compile_shader(vertex_shader); + gl.attach_shader(program, vertex_shader); + gl.shader_source(fragment_shader, fragment_source); + gl.compile_shader(fragment_shader); + gl.attach_shader(program, fragment_shader); + gl.link_program(program); + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + + // Check for errors + // TODO: something other than panic? 
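            // (Editorial note: the asserts below report glow's
            // get_shader_info_log / get_program_info_log output, so a failed
            // compile or link panics with the driver's error message.)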
+ let status = gl.get_shader_compile_status(vertex_shader); + assert!( + status, + "Failed to compile vertex shader: {}", + gl.get_shader_info_log(vertex_shader) + ); + let status = gl.get_shader_compile_status(fragment_shader); + assert!( + status, + "Failed to compile fragment shader: {}", + gl.get_shader_info_log(fragment_shader) + ); + let status = gl.get_program_link_status(program); + assert!( + status, + "Failed to link: {}", + gl.get_program_info_log(program) + ); + + // Clean up + gl.delete_shader(vertex_shader); + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + gl.delete_shader(fragment_shader); + debug_assert_eq!(gl.get_error(), gl::NO_ERROR); + + // And we're done + Some(GlWindowShader { + gl, + buffer, + vao, + program, + mode, + }) + } + } + + fn draw_texture( + &self, + texture_id: Option, + texture_target: u32, + texture_size: Size2D, + viewport_size: Size2D, + window_size: Size2D, + ) { + unsafe { + self.gl.use_program(Some(self.program)); + + self.gl.enable_vertex_attrib_array(VERTEX_ATTRIBUTE); + self.gl.vertex_attrib_pointer_f32( + VERTEX_ATTRIBUTE, + VERTICES[0].len() as i32, + gl::FLOAT, + false, + 0, + 0, + ); + + debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR); + + self.gl.active_texture(gl::TEXTURE0); + self.gl.bind_texture(texture_target, texture_id); + + match self.mode { + GlWindowMode::StereoRedCyan => { + let wasted = 1.0 + - (texture_size.width as f32 / viewport_size.width as f32) + .max(0.0) + .min(1.0); + let wasted_location = self.gl.get_uniform_location(self.program, "wasted"); + self.gl.uniform_1_f32(wasted_location.as_ref(), wasted); + } + GlWindowMode::Blit + | GlWindowMode::Cubemap + | GlWindowMode::Spherical + | GlWindowMode::StereoLeftRight => {} + } + + self.gl + .viewport(0, 0, window_size.width, window_size.height); + self.gl + .draw_arrays(gl::TRIANGLE_STRIP, 0, VERTICES.len() as i32); + self.gl.disable_vertex_attrib_array(VERTEX_ATTRIBUTE); + debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR); + } + } +} + +impl Drop for GlWindowShader { + fn drop(&mut self) { + unsafe { + if let Some(buffer) = self.buffer { + self.gl.delete_buffer(buffer); + } + if let Some(vao) = self.vao { + self.gl.delete_vertex_array(vao); + } + self.gl.delete_program(self.program); + } + } +} diff --git a/components/webxr/headless/mod.rs b/components/webxr/headless/mod.rs new file mode 100644 index 00000000000..7a8fea01f28 --- /dev/null +++ b/components/webxr/headless/mod.rs @@ -0,0 +1,564 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use crate::SurfmanGL; +use crate::SurfmanLayerManager; +use euclid::{Point2D, RigidTransform3D}; +use std::sync::{Arc, Mutex}; +use std::thread; +use surfman::chains::SwapChains; +use webxr_api::util::{self, ClipPlanes, HitTestList}; +use webxr_api::{ + ApiSpace, BaseSpace, ContextId, DeviceAPI, DiscoveryAPI, Error, Event, EventBuffer, Floor, + Frame, FrameUpdateEvent, HitTestId, HitTestResult, HitTestSource, Input, InputFrame, InputId, + InputSource, LayerGrandManager, LayerId, LayerInit, LayerManager, MockButton, MockDeviceInit, + MockDeviceMsg, MockDiscoveryAPI, MockInputMsg, MockViewInit, MockViewsInit, MockWorld, Native, + Quitter, Ray, Receiver, SelectEvent, SelectKind, Sender, Session, SessionBuilder, SessionInit, + SessionMode, Space, SubImages, View, Viewer, ViewerPose, Viewports, Views, +}; + +pub struct HeadlessMockDiscovery {} + +struct HeadlessDiscovery { + data: Arc>, + supports_vr: bool, + supports_inline: bool, + supports_ar: bool, +} + +struct InputInfo { + source: InputSource, + active: bool, + pointer: Option>, + grip: Option>, + clicking: bool, + buttons: Vec, +} + +struct HeadlessDevice { + data: Arc>, + id: u32, + hit_tests: HitTestList, + granted_features: Vec, + grand_manager: LayerGrandManager, + layer_manager: Option, +} + +struct PerSessionData { + id: u32, + mode: SessionMode, + clip_planes: ClipPlanes, + quitter: Option, + events: EventBuffer, + needs_vp_update: bool, +} + +struct HeadlessDeviceData { + floor_transform: Option>, + viewer_origin: Option>, + supported_features: Vec, + views: MockViewsInit, + needs_floor_update: bool, + inputs: Vec, + sessions: Vec, + disconnected: bool, + world: Option, + next_id: u32, + bounds_geometry: Vec>, +} + +impl MockDiscoveryAPI for HeadlessMockDiscovery { + fn simulate_device_connection( + &mut self, + init: MockDeviceInit, + receiver: Receiver, + ) -> Result>, Error> { + let viewer_origin = init.viewer_origin.clone(); + let floor_transform = init.floor_origin.map(|f| f.inverse()); + let views = init.views.clone(); + let data = HeadlessDeviceData { + floor_transform, + viewer_origin, + supported_features: init.supported_features, + views, + needs_floor_update: false, + inputs: vec![], + sessions: vec![], + disconnected: false, + world: init.world, + next_id: 0, + bounds_geometry: vec![], + }; + let data = Arc::new(Mutex::new(data)); + let data_ = data.clone(); + + thread::spawn(move || { + run_loop(receiver, data_); + }); + Ok(Box::new(HeadlessDiscovery { + data, + supports_vr: init.supports_vr, + supports_inline: init.supports_inline, + supports_ar: init.supports_ar, + })) + } +} + +fn run_loop(receiver: Receiver, data: Arc>) { + while let Ok(msg) = receiver.recv() { + if !data.lock().expect("Mutex poisoned").handle_msg(msg) { + break; + } + } +} + +impl DiscoveryAPI for HeadlessDiscovery { + fn request_session( + &mut self, + mode: SessionMode, + init: &SessionInit, + xr: SessionBuilder, + ) -> Result { + if !self.supports_session(mode) { + return Err(Error::NoMatchingDevice); + } + let data = self.data.clone(); + let mut d = data.lock().unwrap(); + let id = d.next_id; + d.next_id += 1; + let per_session = PerSessionData { + id, + mode, + clip_planes: Default::default(), + quitter: Default::default(), + events: Default::default(), + needs_vp_update: false, + }; + d.sessions.push(per_session); + + let granted_features = init.validate(mode, &d.supported_features)?; + let layer_manager = None; + drop(d); + xr.spawn(move |grand_manager| { + Ok(HeadlessDevice { + data, + id, + granted_features, + hit_tests: 
HitTestList::default(), + grand_manager, + layer_manager, + }) + }) + } + + fn supports_session(&self, mode: SessionMode) -> bool { + if self.data.lock().unwrap().disconnected { + return false; + } + match mode { + SessionMode::Inline => self.supports_inline, + SessionMode::ImmersiveVR => self.supports_vr, + SessionMode::ImmersiveAR => self.supports_ar, + } + } +} + +fn view( + init: MockViewInit, + viewer: RigidTransform3D, + clip_planes: ClipPlanes, +) -> View { + let projection = if let Some((l, r, t, b)) = init.fov { + util::fov_to_projection_matrix(l, r, t, b, clip_planes) + } else { + init.projection + }; + + View { + transform: init.transform.inverse().then(&viewer), + projection, + } +} + +impl HeadlessDevice { + fn with_per_session(&self, f: impl FnOnce(&mut PerSessionData) -> R) -> R { + f(self + .data + .lock() + .unwrap() + .sessions + .iter_mut() + .find(|s| s.id == self.id) + .unwrap()) + } + + fn layer_manager(&mut self) -> Result<&mut LayerManager, Error> { + if let Some(ref mut manager) = self.layer_manager { + return Ok(manager); + } + let swap_chains = SwapChains::new(); + let viewports = self.viewports(); + let layer_manager = self.grand_manager.create_layer_manager(move |_, _| { + Ok(SurfmanLayerManager::new(viewports, swap_chains)) + })?; + self.layer_manager = Some(layer_manager); + Ok(self.layer_manager.as_mut().unwrap()) + } +} + +impl DeviceAPI for HeadlessDevice { + fn floor_transform(&self) -> Option> { + self.data.lock().unwrap().floor_transform.clone() + } + + fn viewports(&self) -> Viewports { + let d = self.data.lock().unwrap(); + let per_session = d.sessions.iter().find(|s| s.id == self.id).unwrap(); + d.viewports(per_session.mode) + } + + fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result { + self.layer_manager()?.create_layer(context_id, init) + } + + fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) { + self.layer_manager() + .unwrap() + .destroy_layer(context_id, layer_id) + } + + fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option { + let sub_images = self.layer_manager().ok()?.begin_frame(layers).ok()?; + let mut data = self.data.lock().unwrap(); + let mut frame = data.get_frame( + data.sessions.iter().find(|s| s.id == self.id).unwrap(), + sub_images, + ); + let per_session = data.sessions.iter_mut().find(|s| s.id == self.id).unwrap(); + if per_session.needs_vp_update { + per_session.needs_vp_update = false; + let mode = per_session.mode; + let vp = data.viewports(mode); + frame.events.push(FrameUpdateEvent::UpdateViewports(vp)); + } + let events = self.hit_tests.commit_tests(); + frame.events = events; + + if let Some(ref world) = data.world { + for source in self.hit_tests.tests() { + let ray = data.native_ray(source.ray, source.space); + let ray = if let Some(ray) = ray { ray } else { break }; + let hits = world + .regions + .iter() + .filter(|region| source.types.is_type(region.ty)) + .flat_map(|region| ®ion.faces) + .filter_map(|triangle| triangle.intersect(ray)) + .map(|space| HitTestResult { + space, + id: source.id, + }); + frame.hit_test_results.extend(hits); + } + } + + if data.needs_floor_update { + frame.events.push(FrameUpdateEvent::UpdateFloorTransform( + data.floor_transform.clone(), + )); + data.needs_floor_update = false; + } + Some(frame) + } + + fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) { + let _ = self.layer_manager().unwrap().end_frame(layers); + thread::sleep(std::time::Duration::from_millis(20)); + } + + fn initial_inputs(&self) -> 
Vec { + vec![] + } + + fn set_event_dest(&mut self, dest: Sender) { + self.with_per_session(|s| s.events.upgrade(dest)) + } + + fn quit(&mut self) { + self.with_per_session(|s| s.events.callback(Event::SessionEnd)) + } + + fn set_quitter(&mut self, quitter: Quitter) { + self.with_per_session(|s| s.quitter = Some(quitter)) + } + + fn update_clip_planes(&mut self, near: f32, far: f32) { + self.with_per_session(|s| s.clip_planes.update(near, far)); + } + + fn granted_features(&self) -> &[String] { + &self.granted_features + } + + fn request_hit_test(&mut self, source: HitTestSource) { + self.hit_tests.request_hit_test(source) + } + + fn cancel_hit_test(&mut self, id: HitTestId) { + self.hit_tests.cancel_hit_test(id) + } + + fn reference_space_bounds(&self) -> Option>> { + let bounds = self.data.lock().unwrap().bounds_geometry.clone(); + Some(bounds) + } +} + +impl HeadlessMockDiscovery { + pub fn new() -> HeadlessMockDiscovery { + HeadlessMockDiscovery {} + } +} + +macro_rules! with_all_sessions { + ($self:ident, |$s:ident| $e:expr) => { + for $s in &mut $self.sessions { + $e; + } + }; +} + +impl HeadlessDeviceData { + fn get_frame(&self, s: &PerSessionData, sub_images: Vec) -> Frame { + let views = self.views.clone(); + + let pose = self.viewer_origin.map(|transform| { + let views = if s.mode == SessionMode::Inline { + Views::Inline + } else { + match views { + MockViewsInit::Mono(one) => Views::Mono(view(one, transform, s.clip_planes)), + MockViewsInit::Stereo(one, two) => Views::Stereo( + view(one, transform, s.clip_planes), + view(two, transform, s.clip_planes), + ), + } + }; + + ViewerPose { transform, views } + }); + let inputs = self + .inputs + .iter() + .filter(|i| i.active) + .map(|i| InputFrame { + id: i.source.id, + target_ray_origin: i.pointer, + grip_origin: i.grip, + pressed: false, + squeezed: false, + hand: None, + button_values: vec![], + axis_values: vec![], + input_changed: false, + }) + .collect(); + Frame { + pose, + inputs, + events: vec![], + sub_images, + hit_test_results: vec![], + predicted_display_time: 0.0, + } + } + + fn viewports(&self, mode: SessionMode) -> Viewports { + let vec = if mode == SessionMode::Inline { + vec![] + } else { + match &self.views { + MockViewsInit::Mono(one) => vec![one.viewport], + MockViewsInit::Stereo(one, two) => vec![one.viewport, two.viewport], + } + }; + Viewports { viewports: vec } + } + + fn trigger_select(&mut self, id: InputId, kind: SelectKind, event: SelectEvent) { + for i in 0..self.sessions.len() { + let frame = self.get_frame(&self.sessions[i], Vec::new()); + self.sessions[i] + .events + .callback(Event::Select(id, kind, event, frame)); + } + } + + fn handle_msg(&mut self, msg: MockDeviceMsg) -> bool { + match msg { + MockDeviceMsg::SetWorld(w) => self.world = Some(w), + MockDeviceMsg::ClearWorld => self.world = None, + MockDeviceMsg::SetViewerOrigin(viewer_origin) => { + self.viewer_origin = viewer_origin; + } + MockDeviceMsg::SetFloorOrigin(floor_origin) => { + self.floor_transform = floor_origin.map(|f| f.inverse()); + self.needs_floor_update = true; + } + MockDeviceMsg::SetViews(views) => { + self.views = views; + with_all_sessions!(self, |s| { + s.needs_vp_update = true; + }) + } + MockDeviceMsg::VisibilityChange(v) => { + with_all_sessions!(self, |s| s.events.callback(Event::VisibilityChange(v))) + } + MockDeviceMsg::AddInputSource(init) => { + self.inputs.push(InputInfo { + source: init.source.clone(), + pointer: init.pointer_origin, + grip: init.grip_origin, + active: true, + clicking: false, + buttons: 
init.supported_buttons, + }); + with_all_sessions!(self, |s| s + .events + .callback(Event::AddInput(init.source.clone()))) + } + MockDeviceMsg::MessageInputSource(id, msg) => { + if let Some(ref mut input) = self.inputs.iter_mut().find(|i| i.source.id == id) { + match msg { + MockInputMsg::SetHandedness(h) => { + input.source.handedness = h; + with_all_sessions!(self, |s| { + s.events + .callback(Event::UpdateInput(id, input.source.clone())) + }); + } + MockInputMsg::SetProfiles(p) => { + input.source.profiles = p; + with_all_sessions!(self, |s| { + s.events + .callback(Event::UpdateInput(id, input.source.clone())) + }); + } + MockInputMsg::SetTargetRayMode(t) => { + input.source.target_ray_mode = t; + with_all_sessions!(self, |s| { + s.events + .callback(Event::UpdateInput(id, input.source.clone())) + }); + } + MockInputMsg::SetPointerOrigin(p) => input.pointer = p, + MockInputMsg::SetGripOrigin(p) => input.grip = p, + MockInputMsg::TriggerSelect(kind, event) => { + if !input.active { + return true; + } + let clicking = input.clicking; + input.clicking = event == SelectEvent::Start; + match event { + SelectEvent::Start => { + self.trigger_select(id, kind, event); + } + SelectEvent::End => { + if clicking { + self.trigger_select(id, kind, SelectEvent::Select); + } else { + self.trigger_select(id, kind, SelectEvent::End); + } + } + SelectEvent::Select => { + self.trigger_select(id, kind, SelectEvent::Start); + self.trigger_select(id, kind, SelectEvent::Select); + } + } + } + MockInputMsg::Disconnect => { + if input.active { + with_all_sessions!(self, |s| s + .events + .callback(Event::RemoveInput(input.source.id))); + input.active = false; + input.clicking = false; + } + } + MockInputMsg::Reconnect => { + if !input.active { + with_all_sessions!(self, |s| s + .events + .callback(Event::AddInput(input.source.clone()))); + input.active = true; + } + } + MockInputMsg::SetSupportedButtons(buttons) => { + input.buttons = buttons; + with_all_sessions!(self, |s| s.events.callback(Event::UpdateInput( + input.source.id, + input.source.clone() + ))); + } + MockInputMsg::UpdateButtonState(state) => { + if let Some(button) = input + .buttons + .iter_mut() + .find(|b| b.button_type == state.button_type) + { + *button = state; + } + } + } + } + } + MockDeviceMsg::Disconnect(s) => { + self.disconnected = true; + with_all_sessions!(self, |s| s.quitter.as_ref().map(|q| q.quit())); + // notify the client that we're done disconnecting + let _ = s.send(()); + return false; + } + MockDeviceMsg::SetBoundsGeometry(g) => { + self.bounds_geometry = g; + } + MockDeviceMsg::SimulateResetPose => { + with_all_sessions!(self, |s| s.events.callback(Event::ReferenceSpaceChanged( + BaseSpace::Local, + RigidTransform3D::identity() + ))); + } + } + true + } + + fn native_ray(&self, ray: Ray, space: Space) -> Option> { + let origin: RigidTransform3D = match space.base { + BaseSpace::Local => RigidTransform3D::identity(), + BaseSpace::Floor => self.floor_transform?.inverse().cast_unit(), + BaseSpace::Viewer => self.viewer_origin?.cast_unit(), + BaseSpace::BoundedFloor => self.floor_transform?.inverse().cast_unit(), + BaseSpace::TargetRay(id) => self + .inputs + .iter() + .find(|i| i.source.id == id)? + .pointer? + .cast_unit(), + BaseSpace::Grip(id) => self + .inputs + .iter() + .find(|i| i.source.id == id)? + .grip? + .cast_unit(), + BaseSpace::Joint(..) 
=> panic!("Cannot request mocking backend with hands"), + }; + let space_origin = space.offset.then(&origin); + + let origin_rigid: RigidTransform3D = ray.origin.into(); + Some(Ray { + origin: origin_rigid.then(&space_origin).translation, + direction: space_origin.rotation.transform_vector3d(ray.direction), + }) + } +} diff --git a/components/webxr/lib.rs b/components/webxr/lib.rs new file mode 100644 index 00000000000..ad731b92f07 --- /dev/null +++ b/components/webxr/lib.rs @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! This crate defines the Rust implementation of WebXR for various devices. + +#[cfg(feature = "glwindow")] +pub mod glwindow; + +#[cfg(feature = "headless")] +pub mod headless; + +#[cfg(feature = "openxr-api")] +pub mod openxr; + +pub mod surfman_layer_manager; +pub use surfman_layer_manager::SurfmanGL; +pub use surfman_layer_manager::SurfmanLayerManager; +pub type MainThreadRegistry = webxr_api::MainThreadRegistry; +pub type Discovery = Box>; + +pub(crate) mod gl_utils; diff --git a/components/webxr/openxr/graphics.rs b/components/webxr/openxr/graphics.rs new file mode 100644 index 00000000000..442c3ae952c --- /dev/null +++ b/components/webxr/openxr/graphics.rs @@ -0,0 +1,25 @@ +use euclid::{Size2D, UnknownUnit}; +use openxr::{ExtensionSet, FrameStream, FrameWaiter, Graphics, Instance, Session, SystemId}; +use surfman::Context as SurfmanContext; +use surfman::Device as SurfmanDevice; +use surfman::Error as SurfmanError; +use surfman::SurfaceTexture; +use webxr_api::Error; + +pub enum GraphicsProvider {} + +pub trait GraphicsProviderMethods { + fn enable_graphics_extensions(exts: &mut ExtensionSet); + fn pick_format(formats: &[u32]) -> u32; + fn create_session( + device: &SurfmanDevice, + instance: &Instance, + system: SystemId, + ) -> Result<(Session, FrameWaiter, FrameStream), Error>; + fn surface_texture_from_swapchain_texture( + image: ::SwapchainImage, + device: &mut SurfmanDevice, + context: &mut SurfmanContext, + size: &Size2D, + ) -> Result; +} diff --git a/components/webxr/openxr/graphics_d3d11.rs b/components/webxr/openxr/graphics_d3d11.rs new file mode 100644 index 00000000000..24e7538b129 --- /dev/null +++ b/components/webxr/openxr/graphics_d3d11.rs @@ -0,0 +1,139 @@ +use std::{mem, ptr}; + +use euclid::{Size2D, UnknownUnit}; +use log::warn; +use openxr::d3d::{Requirements, SessionCreateInfoD3D11, D3D11}; +use openxr::{ + ExtensionSet, FormFactor, FrameStream, FrameWaiter, Graphics, Instance, Session, SystemId, +}; +use surfman::Adapter as SurfmanAdapter; +use surfman::Context as SurfmanContext; +use surfman::Device as SurfmanDevice; +use surfman::Error as SurfmanError; +use surfman::SurfaceTexture; +use webxr_api::Error; +use winapi::shared::winerror::{DXGI_ERROR_NOT_FOUND, S_OK}; +use winapi::shared::{dxgi, dxgiformat}; +use winapi::um::d3d11::ID3D11Texture2D; +use winapi::Interface; +use wio::com::ComPtr; + +use crate::openxr::graphics::{GraphicsProvider, GraphicsProviderMethods}; +use crate::openxr::{create_instance, AppInfo}; + +pub type Backend = D3D11; + +impl GraphicsProviderMethods for GraphicsProvider { + fn enable_graphics_extensions(exts: &mut ExtensionSet) { + exts.khr_d3d11_enable = true; + } + + fn pick_format(formats: &[u32]) -> u32 { + // TODO: extract the format from surfman's device and pick a matching + // valid format based on that. 
For now, assume that eglChooseConfig will + // gravitate to B8G8R8A8. + warn!("Available formats: {:?}", formats); + for format in formats { + match *format { + dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM_SRGB => return *format, + dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM => return *format, + //dxgiformat::DXGI_FORMAT_R8G8B8A8_UNORM => return *format, + f => { + warn!("Backend requested unsupported format {:?}", f); + } + } + } + + panic!("No formats supported amongst {:?}", formats); + } + + fn create_session( + device: &SurfmanDevice, + instance: &Instance, + system: SystemId, + ) -> Result<(Session, FrameWaiter, FrameStream), Error> { + // Get the current surfman device and extract its D3D device. This will ensure + // that the OpenXR runtime's texture will be shareable with surfman's surfaces. + let native_device = device.native_device(); + let d3d_device = native_device.d3d11_device; + + // FIXME: we should be using these graphics requirements to drive the actual + // d3d device creation, rather than assuming the device that surfman + // already created is appropriate. OpenXR returns a validation error + // unless we call this method, so we call it and ignore the results + // in the short term. + let _requirements = D3D11::requirements(&instance, system) + .map_err(|e| Error::BackendSpecific(format!("D3D11::requirements {:?}", e)))?; + + unsafe { + instance + .create_session::( + system, + &SessionCreateInfoD3D11 { + device: d3d_device as *mut _, + }, + ) + .map_err(|e| Error::BackendSpecific(format!("Instance::create_session {:?}", e))) + } + } + + fn surface_texture_from_swapchain_texture( + image: ::SwapchainImage, + device: &mut SurfmanDevice, + context: &mut SurfmanContext, + size: &Size2D, + ) -> Result { + unsafe { + let image = ComPtr::from_raw(image as *mut ID3D11Texture2D); + image.AddRef(); + device.create_surface_texture_from_texture(context, size, image) + } + } +} + +fn get_matching_adapter( + requirements: &Requirements, +) -> Result, String> { + unsafe { + let mut factory_ptr: *mut dxgi::IDXGIFactory1 = ptr::null_mut(); + let result = dxgi::CreateDXGIFactory1( + &dxgi::IDXGIFactory1::uuidof(), + &mut factory_ptr as *mut _ as *mut _, + ); + assert_eq!(result, S_OK); + let factory = ComPtr::from_raw(factory_ptr); + + let index = 0; + loop { + let mut adapter_ptr = ptr::null_mut(); + let result = factory.EnumAdapters1(index, &mut adapter_ptr); + if result == DXGI_ERROR_NOT_FOUND { + return Err("No matching adapter".to_owned()); + } + assert_eq!(result, S_OK); + let adapter = ComPtr::from_raw(adapter_ptr); + let mut adapter_desc = mem::zeroed(); + let result = adapter.GetDesc1(&mut adapter_desc); + assert_eq!(result, S_OK); + let adapter_luid = &adapter_desc.AdapterLuid; + if adapter_luid.LowPart == requirements.adapter_luid.LowPart + && adapter_luid.HighPart == requirements.adapter_luid.HighPart + { + return Ok(adapter); + } + } + } +} + +#[allow(unused)] +pub fn create_surfman_adapter() -> Option { + let instance = create_instance(false, false, false, &AppInfo::default()).ok()?; + let system = instance + .instance + .system(FormFactor::HEAD_MOUNTED_DISPLAY) + .ok()?; + + let requirements = D3D11::requirements(&instance.instance, system).ok()?; + let adapter = get_matching_adapter(&requirements).ok()?; + Some(SurfmanAdapter::from_dxgi_adapter(adapter.up())) +} diff --git a/components/webxr/openxr/input.rs b/components/webxr/openxr/input.rs new file mode 100644 index 00000000000..bcd060f3876 --- /dev/null +++ b/components/webxr/openxr/input.rs @@ -0,0 +1,743 @@ +use 
std::ffi::c_void; +use std::mem::MaybeUninit; + +use euclid::RigidTransform3D; +use log::debug; +use openxr::sys::{ + HandJointLocationsEXT, HandJointsLocateInfoEXT, HandTrackingAimStateFB, + FB_HAND_TRACKING_AIM_EXTENSION_NAME, +}; +use openxr::{ + self, Action, ActionSet, Binding, FrameState, Graphics, Hand as HandEnum, HandJoint, + HandJointLocation, HandTracker, HandTrackingAimFlagsFB, Instance, Path, Posef, Session, Space, + SpaceLocationFlags, HAND_JOINT_COUNT, +}; +use webxr_api::Finger; +use webxr_api::Hand; +use webxr_api::Handedness; +use webxr_api::Input; +use webxr_api::InputFrame; +use webxr_api::InputId; +use webxr_api::InputSource; +use webxr_api::JointFrame; +use webxr_api::Native; +use webxr_api::SelectEvent; +use webxr_api::TargetRayMode; +use webxr_api::Viewer; + +use super::interaction_profiles::InteractionProfile; +use super::IDENTITY_POSE; + +use crate::ext_string; +use crate::openxr::interaction_profiles::INTERACTION_PROFILES; + +/// Number of frames to wait with the menu gesture before +/// opening the menu. +const MENU_GESTURE_SUSTAIN_THRESHOLD: u8 = 60; + +/// Helper macro for binding action paths in an interaction profile entry +macro_rules! bind_inputs { + ($actions:expr, $paths:expr, $hand:expr, $instance:expr, $ret:expr) => { + $actions.iter().enumerate().for_each(|(i, action)| { + let action_path = $paths[i]; + if action_path != "" { + let path = $instance + .string_to_path(&format!("/user/hand/{}/input/{}", $hand, action_path)) + .expect(&format!( + "Failed to create path for /user/hand/{}/input/{}", + $hand, action_path + )); + let binding = Binding::new(action, path); + $ret.push(binding); + } + }); + }; +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +enum ClickState { + Clicking, + Done, +} + +/// All the information on a single input frame +pub struct Frame { + pub frame: InputFrame, + pub select: Option, + pub squeeze: Option, + pub menu_selected: bool, +} + +impl ClickState { + fn update_from_action( + &mut self, + action: &Action, + session: &Session, + menu_selected: bool, + ) -> (/* is_active */ bool, Option) { + let click = action.state(session, Path::NULL).unwrap(); + + let select_event = + self.update_from_value(click.current_state, click.is_active, menu_selected); + + (click.is_active, select_event) + } + + fn update_from_value( + &mut self, + current_state: bool, + is_active: bool, + menu_selected: bool, + ) -> Option { + if is_active { + match (current_state, *self) { + (_, ClickState::Clicking) if menu_selected => { + *self = ClickState::Done; + // Cancel the select, we're showing a menu + Some(SelectEvent::End) + } + (true, ClickState::Done) => { + *self = ClickState::Clicking; + Some(SelectEvent::Start) + } + (false, ClickState::Clicking) => { + *self = ClickState::Done; + Some(SelectEvent::Select) + } + _ => None, + } + } else if *self == ClickState::Clicking { + *self = ClickState::Done; + // Cancel the select, we lost tracking + Some(SelectEvent::End) + } else { + None + } + } +} + +pub struct OpenXRInput { + id: InputId, + action_aim_pose: Action, + action_aim_space: Space, + action_grip_pose: Action, + action_grip_space: Space, + action_click: Action, + action_squeeze: Action, + handedness: Handedness, + click_state: ClickState, + squeeze_state: ClickState, + menu_gesture_sustain: u8, + #[allow(unused)] + hand_tracker: Option, + action_buttons_common: Vec>, + action_buttons_left: Vec>, + action_buttons_right: Vec>, + action_axes_common: Vec>, + use_alternate_input_source: bool, +} + +fn hand_str(h: Handedness) -> &'static str { + 
match h { + Handedness::Right => "right", + Handedness::Left => "left", + _ => panic!("We don't support unknown handedness in openxr"), + } +} + +impl OpenXRInput { + pub fn new( + id: InputId, + handedness: Handedness, + action_set: &ActionSet, + session: &Session, + needs_hands: bool, + supported_interaction_profiles: Vec<&'static str>, + ) -> Self { + let hand = hand_str(handedness); + let action_aim_pose: Action = action_set + .create_action( + &format!("{}_hand_aim", hand), + &format!("{} hand aim", hand), + &[], + ) + .unwrap(); + let action_aim_space = action_aim_pose + .create_space(session.clone(), Path::NULL, IDENTITY_POSE) + .unwrap(); + let action_grip_pose: Action = action_set + .create_action( + &format!("{}_hand_grip", hand), + &format!("{} hand grip", hand), + &[], + ) + .unwrap(); + let action_grip_space = action_grip_pose + .create_space(session.clone(), Path::NULL, IDENTITY_POSE) + .unwrap(); + let action_click: Action = action_set + .create_action( + &format!("{}_hand_click", hand), + &format!("{} hand click", hand), + &[], + ) + .unwrap(); + let action_squeeze: Action = action_set + .create_action( + &format!("{}_hand_squeeze", hand), + &format!("{} hand squeeze", hand), + &[], + ) + .unwrap(); + + let hand_tracker = if needs_hands { + let hand = match handedness { + Handedness::Left => HandEnum::LEFT, + Handedness::Right => HandEnum::RIGHT, + _ => panic!("We don't support unknown handedness in openxr"), + }; + session.create_hand_tracker(hand).ok() + } else { + None + }; + + let action_buttons_common: Vec> = { + let button1: Action = action_set + .create_action( + &format!("{}_trigger", hand), + &format!("{}_trigger", hand), + &[], + ) + .unwrap(); + let button2: Action = action_set + .create_action(&format!("{}_grip", hand), &format!("{}_grip", hand), &[]) + .unwrap(); + let button3: Action = action_set + .create_action( + &format!("{}_touchpad_click", hand), + &format!("{}_touchpad_click", hand), + &[], + ) + .unwrap(); + let button4: Action = action_set + .create_action( + &format!("{}_thumbstick_click", hand), + &format!("{}_thumbstick_click", hand), + &[], + ) + .unwrap(); + vec![button1, button2, button3, button4] + }; + + let action_buttons_left = { + let button1: Action = action_set + .create_action(&format!("{}_x", hand), &format!("{}_x", hand), &[]) + .unwrap(); + let button2: Action = action_set + .create_action(&format!("{}_y", hand), &format!("{}_y", hand), &[]) + .unwrap(); + vec![button1, button2] + }; + + let action_buttons_right = { + let button1: Action = action_set + .create_action(&format!("{}_a", hand), &format!("{}_a", hand), &[]) + .unwrap(); + let button2: Action = action_set + .create_action(&format!("{}_b", hand), &format!("{}_b", hand), &[]) + .unwrap(); + vec![button1, button2] + }; + + let action_axes_common: Vec> = { + let axis1: Action = action_set + .create_action( + &format!("{}_touchpad_x", hand), + &format!("{}_touchpad_x", hand), + &[], + ) + .unwrap(); + let axis2: Action = action_set + .create_action( + &format!("{}_touchpad_y", hand), + &format!("{}_touchpad_y", hand), + &[], + ) + .unwrap(); + let axis3: Action = action_set + .create_action( + &format!("{}_thumbstick_x", hand), + &format!("{}_thumbstick_x", hand), + &[], + ) + .unwrap(); + let axis4: Action = action_set + .create_action( + &format!("{}_thumbstick_y", hand), + &format!("{}_thumbstick_y", hand), + &[], + ) + .unwrap(); + vec![axis1, axis2, axis3, axis4] + }; + + let use_alternate_input_source = supported_interaction_profiles + 
.contains(&ext_string!(FB_HAND_TRACKING_AIM_EXTENSION_NAME)); + + Self { + id, + action_aim_pose, + action_aim_space, + action_grip_pose, + action_grip_space, + action_click, + action_squeeze, + handedness, + click_state: ClickState::Done, + squeeze_state: ClickState::Done, + menu_gesture_sustain: 0, + hand_tracker, + action_buttons_common, + action_axes_common, + action_buttons_left, + action_buttons_right, + use_alternate_input_source, + } + } + + pub fn setup_inputs( + instance: &Instance, + session: &Session, + needs_hands: bool, + supported_interaction_profiles: Vec<&'static str>, + ) -> (ActionSet, Self, Self) { + let action_set = instance.create_action_set("hands", "Hands", 0).unwrap(); + let right_hand = OpenXRInput::new( + InputId(0), + Handedness::Right, + &action_set, + &session, + needs_hands, + supported_interaction_profiles.clone(), + ); + let left_hand = OpenXRInput::new( + InputId(1), + Handedness::Left, + &action_set, + &session, + needs_hands, + supported_interaction_profiles.clone(), + ); + + for profile in INTERACTION_PROFILES { + if let Some(extension_name) = profile.required_extension { + if !supported_interaction_profiles.contains(&ext_string!(extension_name)) { + continue; + } + } + + if profile.path.is_empty() { + continue; + } + + let select = profile.standard_buttons[0]; + let squeeze = Option::from(profile.standard_buttons[1]).filter(|&s| !s.is_empty()); + let mut bindings = right_hand.get_bindings(instance, select, squeeze, &profile); + bindings.extend( + left_hand + .get_bindings(instance, select, squeeze, &profile) + .into_iter(), + ); + + let path_controller = instance + .string_to_path(profile.path) + .expect(format!("Invalid interaction profile path: {}", profile.path).as_str()); + if let Err(_) = + instance.suggest_interaction_profile_bindings(path_controller, &bindings) + { + debug!( + "Interaction profile path not available for this runtime: {:?}", + profile.path + ); + } + } + + session.attach_action_sets(&[&action_set]).unwrap(); + + (action_set, right_hand, left_hand) + } + + fn get_bindings( + &self, + instance: &Instance, + select_name: &str, + squeeze_name: Option<&str>, + interaction_profile: &InteractionProfile, + ) -> Vec { + let hand = hand_str(self.handedness); + let path_aim_pose = instance + .string_to_path(&format!("/user/hand/{}/input/aim/pose", hand)) + .expect(&format!( + "Failed to create path for /user/hand/{}/input/aim/pose", + hand + )); + let binding_aim_pose = Binding::new(&self.action_aim_pose, path_aim_pose); + let path_grip_pose = instance + .string_to_path(&format!("/user/hand/{}/input/grip/pose", hand)) + .expect(&format!( + "Failed to create path for /user/hand/{}/input/grip/pose", + hand + )); + let binding_grip_pose = Binding::new(&self.action_grip_pose, path_grip_pose); + let path_click = instance + .string_to_path(&format!("/user/hand/{}/input/{}", hand, select_name)) + .expect(&format!( + "Failed to create path for /user/hand/{}/input/{}", + hand, select_name + )); + let binding_click = Binding::new(&self.action_click, path_click); + + let mut ret = vec![binding_aim_pose, binding_grip_pose, binding_click]; + if let Some(squeeze_name) = squeeze_name { + let path_squeeze = instance + .string_to_path(&format!("/user/hand/{}/input/{}", hand, squeeze_name)) + .expect(&format!( + "Failed to create path for /user/hand/{}/input/{}", + hand, squeeze_name + )); + let binding_squeeze = Binding::new(&self.action_squeeze, path_squeeze); + ret.push(binding_squeeze); + } + + bind_inputs!( + self.action_buttons_common, + 
interaction_profile.standard_buttons, + hand, + instance, + ret + ); + + if !interaction_profile.left_buttons.is_empty() && hand == "left" { + bind_inputs!( + self.action_buttons_left, + interaction_profile.left_buttons, + hand, + instance, + ret + ); + } else if !interaction_profile.right_buttons.is_empty() && hand == "right" { + bind_inputs!( + self.action_buttons_right, + interaction_profile.right_buttons, + hand, + instance, + ret + ); + } + + bind_inputs!( + self.action_axes_common, + interaction_profile.standard_axes, + hand, + instance, + ret + ); + + ret + } + + pub fn frame( + &mut self, + session: &Session, + frame_state: &FrameState, + base_space: &Space, + viewer: &RigidTransform3D, + ) -> Frame { + use euclid::Vector3D; + let mut target_ray_origin = pose_for(&self.action_aim_space, frame_state, base_space); + + let grip_origin = pose_for(&self.action_grip_space, frame_state, base_space); + + let mut menu_selected = false; + // Check if the palm is facing up. This is our "menu" gesture. + if let Some(grip_origin) = grip_origin { + // The X axis of the grip is perpendicular to the palm, however its + // direction is the opposite for each hand + // + // We obtain a unit vector pointing out of the palm + let x_dir = if let Handedness::Left = self.handedness { + 1.0 + } else { + -1.0 + }; + // Rotate it by the grip to obtain the desired vector + let grip_x = grip_origin + .rotation + .transform_vector3d(Vector3D::new(x_dir, 0.0, 0.0)); + let gaze = viewer + .rotation + .transform_vector3d(Vector3D::new(0., 0., 1.)); + + // If the angle is close enough to 0, its cosine will be + // close to 1 + // check if the user's gaze is parallel to the palm + if gaze.dot(grip_x) > 0.95 { + let input_relative = (viewer.translation - grip_origin.translation).normalize(); + // if so, check if the user is actually looking at the palm + if gaze.dot(input_relative) > 0.95 { + self.menu_gesture_sustain += 1; + if self.menu_gesture_sustain > MENU_GESTURE_SUSTAIN_THRESHOLD { + menu_selected = true; + self.menu_gesture_sustain = 0; + } + } else { + self.menu_gesture_sustain = 0 + } + } else { + self.menu_gesture_sustain = 0; + } + } else { + self.menu_gesture_sustain = 0; + } + + let hand = hand_str(self.handedness); + let click = self.action_click.state(session, Path::NULL).unwrap(); + let squeeze = self.action_squeeze.state(session, Path::NULL).unwrap(); + let (button_values, buttons_changed) = { + let mut changed = false; + let mut values = Vec::::new(); + let mut sync_buttons = |actions: &Vec>| { + let buttons = actions + .iter() + .map(|action| { + let state = action.state(session, Path::NULL).unwrap(); + changed = changed || state.changed_since_last_sync; + state.current_state + }) + .collect::>(); + values.extend_from_slice(&buttons); + }; + sync_buttons(&self.action_buttons_common); + if hand == "left" { + sync_buttons(&self.action_buttons_left); + } else if hand == "right" { + sync_buttons(&self.action_buttons_right); + } + (values, changed) + }; + + let (axis_values, axes_changed) = { + let mut changed = false; + let values = self + .action_axes_common + .iter() + .enumerate() + .map(|(i, action)| { + let state = action.state(session, Path::NULL).unwrap(); + changed = changed || state.changed_since_last_sync; + // Invert input from y axes + state.current_state * if i % 2 == 1 { -1.0 } else { 1.0 } + }) + .collect::>(); + (values, changed) + }; + + let input_changed = buttons_changed || axes_changed; + + let (click_is_active, mut click_event) = if !self.use_alternate_input_source { + 
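+ // Normal path: select events come from the OpenXR click action. When the FB
+ // hand-tracking-aim extension is active (use_alternate_input_source), click_event
+ // is instead derived further down from the aim state's INDEX_PINCHING flag.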
self.click_state + .update_from_action(&self.action_click, session, menu_selected) + } else { + (true, None) + }; + let (squeeze_is_active, squeeze_event) = + self.squeeze_state + .update_from_action(&self.action_squeeze, session, menu_selected); + + let mut aim_state: Option = None; + let hand = self.hand_tracker.as_ref().and_then(|tracker| { + locate_hand( + base_space, + tracker, + frame_state, + self.use_alternate_input_source, + session, + &mut aim_state, + ) + }); + + let mut pressed = click_is_active && click.current_state; + let squeezed = squeeze_is_active && squeeze.current_state; + + if let Some(state) = aim_state { + target_ray_origin.replace(super::transform(&state.aim_pose)); + let index_pinching = state + .status + .intersects(HandTrackingAimFlagsFB::INDEX_PINCHING); + click_event = self + .click_state + .update_from_value(index_pinching, true, menu_selected); + pressed = index_pinching; + } + + let input_frame = InputFrame { + target_ray_origin, + id: self.id, + pressed, + squeezed, + grip_origin, + hand, + button_values, + axis_values, + input_changed, + }; + + Frame { + frame: input_frame, + select: click_event, + squeeze: squeeze_event, + menu_selected, + } + } + + pub fn input_source(&self) -> InputSource { + let hand_support = if self.hand_tracker.is_some() { + // openxr runtimes must always support all or none joints + Some(Hand::<()>::default().map(|_, _| Some(()))) + } else { + None + }; + InputSource { + handedness: self.handedness, + id: self.id, + target_ray_mode: TargetRayMode::TrackedPointer, + supports_grip: true, + profiles: vec![], + hand_support, + } + } +} + +fn pose_for( + action_space: &Space, + frame_state: &FrameState, + base_space: &Space, +) -> Option> { + let location = action_space + .locate(base_space, frame_state.predicted_display_time) + .unwrap(); + let pose_valid = location + .location_flags + .intersects(SpaceLocationFlags::POSITION_VALID | SpaceLocationFlags::ORIENTATION_VALID); + if pose_valid { + Some(super::transform(&location.pose)) + } else { + None + } +} + +fn locate_hand( + base_space: &Space, + tracker: &HandTracker, + frame_state: &FrameState, + use_alternate_input_source: bool, + session: &Session, + aim_state: &mut Option, +) -> Option>> { + let mut state = HandTrackingAimStateFB::out(std::ptr::null_mut()); + let locations = { + if !use_alternate_input_source { + base_space.locate_hand_joints(tracker, frame_state.predicted_display_time) + } else { + let locate_info = HandJointsLocateInfoEXT { + ty: HandJointsLocateInfoEXT::TYPE, + next: std::ptr::null(), + base_space: base_space.as_raw(), + time: frame_state.predicted_display_time, + }; + + let mut locations = MaybeUninit::<[HandJointLocation; HAND_JOINT_COUNT]>::uninit(); + let mut location_info = HandJointLocationsEXT { + ty: HandJointLocationsEXT::TYPE, + next: &mut state as *mut _ as *mut c_void, + is_active: false.into(), + joint_count: HAND_JOINT_COUNT as u32, + joint_locations: locations.as_mut_ptr() as _, + }; + + // Check if hand tracking is supported by the session instance + let raw_hand_tracker = session.instance().exts().ext_hand_tracking.as_ref()?; + + unsafe { + Ok( + match (raw_hand_tracker.locate_hand_joints)( + tracker.as_raw(), + &locate_info, + &mut location_info, + ) { + openxr::sys::Result::SUCCESS if location_info.is_active.into() => { + aim_state.replace(state.assume_init()); + Some(locations.assume_init()) + } + _ => None, + }, + ) + } + } + }; + let locations = if let Ok(Some(ref locations)) = locations { + Hand { + wrist: 
Some(&locations[HandJoint::WRIST]), + thumb_metacarpal: Some(&locations[HandJoint::THUMB_METACARPAL]), + thumb_phalanx_proximal: Some(&locations[HandJoint::THUMB_PROXIMAL]), + thumb_phalanx_distal: Some(&locations[HandJoint::THUMB_DISTAL]), + thumb_phalanx_tip: Some(&locations[HandJoint::THUMB_TIP]), + index: Finger { + metacarpal: Some(&locations[HandJoint::INDEX_METACARPAL]), + phalanx_proximal: Some(&locations[HandJoint::INDEX_PROXIMAL]), + phalanx_intermediate: Some(&locations[HandJoint::INDEX_INTERMEDIATE]), + phalanx_distal: Some(&locations[HandJoint::INDEX_DISTAL]), + phalanx_tip: Some(&locations[HandJoint::INDEX_TIP]), + }, + middle: Finger { + metacarpal: Some(&locations[HandJoint::MIDDLE_METACARPAL]), + phalanx_proximal: Some(&locations[HandJoint::MIDDLE_PROXIMAL]), + phalanx_intermediate: Some(&locations[HandJoint::MIDDLE_INTERMEDIATE]), + phalanx_distal: Some(&locations[HandJoint::MIDDLE_DISTAL]), + phalanx_tip: Some(&locations[HandJoint::MIDDLE_TIP]), + }, + ring: Finger { + metacarpal: Some(&locations[HandJoint::RING_METACARPAL]), + phalanx_proximal: Some(&locations[HandJoint::RING_PROXIMAL]), + phalanx_intermediate: Some(&locations[HandJoint::RING_INTERMEDIATE]), + phalanx_distal: Some(&locations[HandJoint::RING_DISTAL]), + phalanx_tip: Some(&locations[HandJoint::RING_TIP]), + }, + little: Finger { + metacarpal: Some(&locations[HandJoint::LITTLE_METACARPAL]), + phalanx_proximal: Some(&locations[HandJoint::LITTLE_PROXIMAL]), + phalanx_intermediate: Some(&locations[HandJoint::LITTLE_INTERMEDIATE]), + phalanx_distal: Some(&locations[HandJoint::LITTLE_DISTAL]), + phalanx_tip: Some(&locations[HandJoint::LITTLE_TIP]), + }, + } + } else { + return None; + }; + + Some(Box::new(locations.map(|loc, _| { + loc.and_then(|location| { + let pose_valid = location.location_flags.intersects( + SpaceLocationFlags::POSITION_VALID | SpaceLocationFlags::ORIENTATION_VALID, + ); + if pose_valid { + Some(JointFrame { + pose: super::transform(&location.pose), + radius: location.radius, + }) + } else { + None + } + }) + }))) +} diff --git a/components/webxr/openxr/interaction_profiles.rs b/components/webxr/openxr/interaction_profiles.rs new file mode 100644 index 00000000000..5192fc2c775 --- /dev/null +++ b/components/webxr/openxr/interaction_profiles.rs @@ -0,0 +1,444 @@ +use openxr::{ + sys::{ + BD_CONTROLLER_INTERACTION_EXTENSION_NAME, EXT_HAND_INTERACTION_EXTENSION_NAME, + EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME, + EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME, FB_HAND_TRACKING_AIM_EXTENSION_NAME, + FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME, + HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME, + HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME, + META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME, ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME, + }, + ExtensionSet, +}; + +#[macro_export] +macro_rules! 
ext_string { + ($ext_name:expr) => { + std::str::from_utf8($ext_name).unwrap() + }; +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum InteractionProfileType { + KhrSimpleController, + BytedancePicoNeo3Controller, + BytedancePico4Controller, + BytedancePicoG3Controller, + GoogleDaydreamController, + HpMixedRealityController, + HtcViveController, + HtcViveCosmosController, + HtcViveFocus3Controller, + MagicLeap2Controller, + MicrosoftMixedRealityMotionController, + OculusGoController, + OculusTouchController, + FacebookTouchControllerPro, + MetaTouchPlusController, + MetaTouchControllerRiftCv1, + MetaTouchControllerQuest1RiftS, + MetaTouchControllerQuest2, + SamsungOdysseyController, + ValveIndexController, + ExtHandInteraction, + FbHandTrackingAim, +} + +#[derive(Clone, Copy, Debug)] +pub struct InteractionProfile<'a> { + pub profile_type: InteractionProfileType, + /// The interaction profile path + pub path: &'static str, + /// The OpenXR extension, if any, required to use this profile + pub required_extension: Option<&'a [u8]>, + /// Trigger, Grip, Touchpad, Thumbstick + pub standard_buttons: &'a [&'a str], + /// Touchpad X, Touchpad Y, Thumbstick X, Thumbstick Y + pub standard_axes: &'a [&'a str], + /// Any additional buttons on the left controller + pub left_buttons: &'a [&'a str], + /// Any additional buttons on the right controller + pub right_buttons: &'a [&'a str], + /// The corresponding WebXR Input Profile names + pub profiles: &'a [&'a str], +} + +pub static KHR_SIMPLE_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::KhrSimpleController, + path: "/interaction_profiles/khr/simple_controller", + required_extension: None, + standard_buttons: &["select/click", "", "", ""], + standard_axes: &["", "", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["generic-trigger"], +}; + +pub static BYTEDANCE_PICO_NEO3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::BytedancePicoNeo3Controller, + path: "/interaction_profiles/bytedance/pico_neo3_controller", + required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["pico-neo3", "generic-trigger-squeeze-thumbstick"], +}; + +pub static BYTEDANCE_PICO_4_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::BytedancePico4Controller, + path: "/interaction_profiles/bytedance/pico4_controller", + required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["pico-4", "generic-trigger-squeeze-thumbstick"], +}; + +pub static BYTEDANCE_PICO_G3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::BytedancePicoG3Controller, + path: "/interaction_profiles/bytedance/pico_g3_controller", + required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "", "", "thumbstick/click"], + // Note: X/Y components not listed in the OpenXR spec currently due to vendor error. 
+ // See + // It also uses the thumbstick path despite clearly being a touchpad, so + // move those values into the touchpad axes slots + standard_axes: &["thumbstick/x", "thumbstick/y", "", ""], + left_buttons: &[], + right_buttons: &[], + // Note: There is no corresponding WebXR Input profile for the Pico G3, + // but the controller seems identical to the G2, so use that instead. + profiles: &["pico-g2", "generic-trigger-touchpad"], +}; + +pub static GOOGLE_DAYDREAM_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::GoogleDaydreamController, + path: "/interaction_profiles/google/daydream_controller", + required_extension: None, + standard_buttons: &["select/click", "", "trackpad/click", ""], + standard_axes: &["trackpad/x", "trackpad/y", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["google-daydream", "generic-touchpad"], +}; + +pub static HP_MIXED_REALITY_MOTION_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::HpMixedRealityController, + path: "/interaction_profiles/hp/mixed_reality_controller", + required_extension: Some(EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "hp-mixed-reality", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static HTC_VIVE_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::HtcViveController, + path: "/interaction_profiles/htc/vive_controller", + required_extension: None, + standard_buttons: &["trigger/value", "squeeze/click", "trackpad/click", ""], + standard_axes: &["trackpad/x", "trackpad/y", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["htc-vive", "generic-trigger-squeeze-touchpad"], +}; + +pub static HTC_VIVE_COSMOS_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::HtcViveCosmosController, + path: "/interaction_profiles/htc/vive_cosmos_controller", + required_extension: Some(HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/click", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["htc-vive-cosmos", "generic-trigger-squeeze-thumbstick"], +}; + +pub static HTC_VIVE_FOCUS3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::HtcViveFocus3Controller, + path: "/interaction_profiles/htc/vive_focus3_controller", + required_extension: Some(HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["htc-vive-focus-3", "generic-trigger-squeeze-thumbstick"], +}; + +pub static MAGIC_LEAP_2_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::MagicLeap2Controller, + path: "/interaction_profiles/ml/ml2_controller", + required_extension: Some(ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME), + standard_buttons: &["trigger/value", "", "trackpad/click", ""], 
+ standard_axes: &["trackpad/x", "trackpad/y", "", ""], + left_buttons: &[], + right_buttons: &[], + // Note: There is no corresponding WebXR Input profile for the Magic Leap 2, + // but the controller seems mostly identical to the 1, so use that instead. + profiles: &["magicleap-one", "generic-trigger-squeeze-touchpad"], +}; + +pub static MICROSOFT_MIXED_REALITY_MOTION_CONTROLLER_PROFILE: InteractionProfile = + InteractionProfile { + profile_type: InteractionProfileType::MicrosoftMixedRealityMotionController, + path: "/interaction_profiles/microsoft/motion_controller", + required_extension: None, + standard_buttons: &[ + "trigger/value", + "squeeze/click", + "trackpad/click", + "thumbstick/click", + ], + standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"], + left_buttons: &[], + right_buttons: &[], + profiles: &[ + "microsoft-mixed-reality", + "generic-trigger-squeeze-touchpad-thumbstick", + ], + }; + +pub static OCULUS_GO_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::OculusGoController, + path: "/interaction_profiles/oculus/go_controller", + required_extension: None, + standard_buttons: &["trigger/click", "", "trackpad/click", ""], + standard_axes: &["trackpad/x", "trackpad/y", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["oculus-go", "generic-trigger-touchpad"], +}; + +pub static OCULUS_TOUCH_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::OculusTouchController, + path: "/interaction_profiles/oculus/touch_controller", + required_extension: None, + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "oculus-touch-v3", + "oculus-touch-v2", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static FACEBOOK_TOUCH_CONTROLLER_PRO_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::FacebookTouchControllerPro, + path: "/interaction_profiles/facebook/touch_controller_pro", + required_extension: Some(FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "meta-quest-touch-pro", + "oculus-touch-v2", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static META_TOUCH_CONTROLLER_PLUS_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::MetaTouchPlusController, + path: "/interaction_profiles/meta/touch_controller_plus", + required_extension: Some(META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME), + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "meta-quest-touch-plus", + "oculus-touch-v3", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static META_TOUCH_CONTROLLER_RIFT_CV1_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::MetaTouchControllerRiftCv1, + path: "/interaction_profiles/meta/touch_controller_rift_cv1", + required_extension: None, + standard_buttons: 
&["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["oculus-touch", "generic-trigger-squeeze-thumbstick"], +}; + +pub static META_TOUCH_CONTROLLER_QUEST_1_RIFT_S_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::MetaTouchControllerQuest1RiftS, + path: "/interaction_profiles/meta/touch_controller_quest_1_rift_s", + required_extension: None, + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "oculus-touch-v2", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static META_TOUCH_CONTROLLER_QUEST_2_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::MetaTouchControllerQuest2, + path: "/interaction_profiles/meta/touch_controller_quest_2", + required_extension: None, + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["", "", "thumbstick/x", "thumbstick/y"], + left_buttons: &["x/click", "y/click"], + right_buttons: &["a/click", "b/click"], + profiles: &[ + "oculus-touch-v3", + "oculus-touch-v2", + "oculus-touch", + "generic-trigger-squeeze-thumbstick", + ], +}; + +pub static SAMSUNG_ODYSSEY_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::SamsungOdysseyController, + path: "/interaction_profiles/samsung/odyssey_controller", + required_extension: Some(EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME), + standard_buttons: &[ + "trigger/value", + "squeeze/click", + "trackpad/click", + "thumbstick/click", + ], + standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"], + left_buttons: &[], + right_buttons: &[], + profiles: &[ + "samsung-odyssey", + "microsoft-mixed-reality", + "generic-trigger-squeeze-touchpad-thumbstick", + ], +}; + +pub static VALVE_INDEX_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::ValveIndexController, + path: "/interaction_profiles/valve/index_controller", + required_extension: None, + standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"], + standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"], + left_buttons: &["a/click", "b/click"], + right_buttons: &["a/click", "b/click"], + profiles: &["valve-index", "generic-trigger-squeeze-touchpad-thumbstick"], +}; + +pub static EXT_HAND_INTERACTION_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::ExtHandInteraction, + path: "/interaction_profiles/ext/hand_interaction_ext", + required_extension: Some(EXT_HAND_INTERACTION_EXTENSION_NAME), + standard_buttons: &["pinch_ext/value", "", "", ""], + standard_axes: &["", "", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["generic-hand-select", "generic-hand"], +}; + +pub static FB_HAND_TRACKING_AIM_PROFILE: InteractionProfile = InteractionProfile { + profile_type: InteractionProfileType::FbHandTrackingAim, + path: "", + required_extension: Some(FB_HAND_TRACKING_AIM_EXTENSION_NAME), + standard_buttons: &["", "", "", ""], + standard_axes: &["", "", "", ""], + left_buttons: &[], + right_buttons: &[], + profiles: &["generic-hand-select", "generic-hand"], +}; + +pub static 
INTERACTION_PROFILES: [InteractionProfile; 22] = [ + KHR_SIMPLE_CONTROLLER_PROFILE, + BYTEDANCE_PICO_NEO3_CONTROLLER_PROFILE, + BYTEDANCE_PICO_4_CONTROLLER_PROFILE, + BYTEDANCE_PICO_G3_CONTROLLER_PROFILE, + GOOGLE_DAYDREAM_CONTROLLER_PROFILE, + HP_MIXED_REALITY_MOTION_CONTROLLER_PROFILE, + HTC_VIVE_CONTROLLER_PROFILE, + HTC_VIVE_COSMOS_CONTROLLER_PROFILE, + HTC_VIVE_FOCUS3_CONTROLLER_PROFILE, + MAGIC_LEAP_2_CONTROLLER_PROFILE, + MICROSOFT_MIXED_REALITY_MOTION_CONTROLLER_PROFILE, + OCULUS_GO_CONTROLLER_PROFILE, + OCULUS_TOUCH_CONTROLLER_PROFILE, + FACEBOOK_TOUCH_CONTROLLER_PRO_PROFILE, + META_TOUCH_CONTROLLER_PLUS_PROFILE, + META_TOUCH_CONTROLLER_RIFT_CV1_PROFILE, + META_TOUCH_CONTROLLER_QUEST_1_RIFT_S_PROFILE, + META_TOUCH_CONTROLLER_QUEST_2_PROFILE, + SAMSUNG_ODYSSEY_CONTROLLER_PROFILE, + VALVE_INDEX_CONTROLLER_PROFILE, + EXT_HAND_INTERACTION_PROFILE, + FB_HAND_TRACKING_AIM_PROFILE, +]; + +pub fn get_profiles_from_path(path: String) -> &'static [&'static str] { + INTERACTION_PROFILES + .iter() + .find(|profile| profile.path == path) + .map_or(&[], |profile| profile.profiles) +} + +pub fn get_supported_interaction_profiles( + supported_extensions: &ExtensionSet, + enabled_extensions: &mut ExtensionSet, +) -> Vec<&'static str> { + let mut extensions = Vec::new(); + if supported_extensions.bd_controller_interaction { + extensions.push(ext_string!(BD_CONTROLLER_INTERACTION_EXTENSION_NAME)); + enabled_extensions.bd_controller_interaction = true; + } + if supported_extensions.ext_hp_mixed_reality_controller { + extensions.push(ext_string!(EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME)); + enabled_extensions.ext_hp_mixed_reality_controller = true; + } + if supported_extensions.ext_samsung_odyssey_controller { + extensions.push(ext_string!(EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME)); + enabled_extensions.ext_samsung_odyssey_controller = true; + } + if supported_extensions.ml_ml2_controller_interaction { + extensions.push(ext_string!(ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME)); + enabled_extensions.ml_ml2_controller_interaction = true; + } + if supported_extensions.htc_vive_cosmos_controller_interaction { + extensions.push(ext_string!( + HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME + )); + enabled_extensions.htc_vive_cosmos_controller_interaction = true; + } + if supported_extensions.htc_vive_focus3_controller_interaction { + extensions.push(ext_string!( + HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME + )); + enabled_extensions.htc_vive_focus3_controller_interaction = true; + } + if supported_extensions.fb_touch_controller_pro { + extensions.push(ext_string!(FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME)); + enabled_extensions.fb_touch_controller_pro = true; + } + if supported_extensions.meta_touch_controller_plus { + extensions.push(ext_string!(META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME)); + enabled_extensions.meta_touch_controller_plus = true; + } + if supported_extensions.ext_hand_interaction { + extensions.push(ext_string!(EXT_HAND_INTERACTION_EXTENSION_NAME)); + enabled_extensions.ext_hand_interaction = true; + } + if supported_extensions.fb_hand_tracking_aim { + extensions.push(ext_string!(FB_HAND_TRACKING_AIM_EXTENSION_NAME)); + enabled_extensions.fb_hand_tracking_aim = true; + } + extensions +} diff --git a/components/webxr/openxr/mod.rs b/components/webxr/openxr/mod.rs new file mode 100644 index 00000000000..2981c91b83a --- /dev/null +++ b/components/webxr/openxr/mod.rs @@ -0,0 +1,1594 @@ +use crate::gl_utils::GlClearer; +use crate::SurfmanGL; + +use euclid::Box2D; +use 
euclid::Point2D; +use euclid::Rect; +use euclid::RigidTransform3D; +use euclid::Rotation3D; +use euclid::Size2D; +use euclid::Transform3D; +use euclid::Vector3D; +use glow::PixelUnpackData; +use glow::{self as gl, HasContext}; +use interaction_profiles::{get_profiles_from_path, get_supported_interaction_profiles}; +use log::{error, warn}; +use openxr::sys::CompositionLayerPassthroughFB; +use openxr::{ + self, ActionSet, ActiveActionSet, ApplicationInfo, CompositionLayerBase, CompositionLayerFlags, + CompositionLayerProjection, Entry, EnvironmentBlendMode, ExtensionSet, Extent2Di, FormFactor, + Fovf, FrameState, FrameStream, FrameWaiter, Graphics, Instance, Passthrough, + PassthroughFlagsFB, PassthroughLayer, PassthroughLayerPurposeFB, Posef, Quaternionf, + ReferenceSpaceType, SecondaryEndInfo, Session, Space, Swapchain, SwapchainCreateFlags, + SwapchainCreateInfo, SwapchainUsageFlags, SystemId, Vector3f, Version, ViewConfigurationType, +}; +use std::collections::HashMap; +use std::mem; +use std::num::NonZeroU32; +use std::ops::Deref; +use std::sync::{Arc, Mutex}; +use std::thread; +use std::time::Duration; +use surfman::Context as SurfmanContext; +use surfman::Device as SurfmanDevice; +use surfman::Error as SurfmanError; +use surfman::SurfaceTexture; +use webxr_api; +use webxr_api::util::{self, ClipPlanes}; +use webxr_api::BaseSpace; +use webxr_api::Capture; +use webxr_api::ContextId; +use webxr_api::DeviceAPI; +use webxr_api::DiscoveryAPI; +use webxr_api::Display; +use webxr_api::Error; +use webxr_api::Event; +use webxr_api::EventBuffer; +use webxr_api::Floor; +use webxr_api::Frame; +use webxr_api::GLContexts; +use webxr_api::InputId; +use webxr_api::InputSource; +use webxr_api::LayerGrandManager; +use webxr_api::LayerId; +use webxr_api::LayerInit; +use webxr_api::LayerManager; +use webxr_api::LayerManagerAPI; +use webxr_api::LeftEye; +use webxr_api::Native; +use webxr_api::Quitter; +use webxr_api::RightEye; +use webxr_api::SelectKind; +use webxr_api::Sender; +use webxr_api::Session as WebXrSession; +use webxr_api::SessionBuilder; +use webxr_api::SessionInit; +use webxr_api::SessionMode; +use webxr_api::SubImage; +use webxr_api::SubImages; +use webxr_api::View; +use webxr_api::ViewerPose; +use webxr_api::Viewport; +use webxr_api::Viewports; +use webxr_api::Views; +use webxr_api::Visibility; + +mod input; +use input::OpenXRInput; +mod graphics; +mod interaction_profiles; +use graphics::{GraphicsProvider, GraphicsProviderMethods}; + +#[cfg(target_os = "windows")] +mod graphics_d3d11; +#[cfg(target_os = "windows")] +use graphics_d3d11::Backend; + +const HEIGHT: f32 = 1.4; + +const IDENTITY_POSE: Posef = Posef { + orientation: Quaternionf { + x: 0., + y: 0., + z: 0., + w: 1., + }, + position: Vector3f { + x: 0., + y: 0., + z: 0., + }, +}; + +const VIEW_INIT: openxr::View = openxr::View { + pose: IDENTITY_POSE, + fov: Fovf { + angle_left: 0., + angle_right: 0., + angle_up: 0., + angle_down: 0., + }, +}; + +// How much to downscale the view capture by. +// This is used for performance reasons, to dedicate less texture memory to the camera. +// Note that on an HL2 this allocates enough texture memory for "low power" mode, +// not "high quality" (in the device portal under +// Views > Mixed Reality Capture > Photo and Video Settings). 
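+// E.g. with a downscale factor of 2, a 1920x1080 capture view only needs a 960x540
+// region of texture, since the capture view extents are divided by this factor.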
+const SECONDARY_VIEW_DOWNSCALE: i32 = 2; + +/// Provides a way to spawn and interact with context menus +pub trait ContextMenuProvider: Send { + /// Open a context menu, return a way to poll for the result + fn open_context_menu(&self) -> Box; + /// Clone self as a trait object + fn clone_object(&self) -> Box; +} + +/// A way to poll for the result of the context menu request +pub trait ContextMenuFuture { + fn poll(&self) -> ContextMenuResult; +} + +/// The result of polling on a context menu request +pub enum ContextMenuResult { + /// Session should exit + ExitSession, + /// Dialog was dismissed + Dismissed, + /// User has not acted on dialog + Pending, +} + +#[derive(Default)] +pub struct AppInfo { + application_name: String, + application_version: u32, + engine_name: String, + engine_version: u32, +} + +impl AppInfo { + pub fn new( + application_name: &str, + application_version: u32, + engine_name: &str, + engine_version: u32, + ) -> AppInfo { + Self { + application_name: application_name.to_string(), + application_version, + engine_name: engine_name.to_string(), + engine_version, + } + } +} + +struct ViewInfo { + view: openxr::View, + extent: Extent2Di, + cached_projection: Transform3D, +} + +impl ViewInfo { + fn set_view(&mut self, view: openxr::View, clip_planes: ClipPlanes) { + self.view.pose = view.pose; + if self.view.fov.angle_left != view.fov.angle_left + || self.view.fov.angle_right != view.fov.angle_right + || self.view.fov.angle_up != view.fov.angle_up + || self.view.fov.angle_down != view.fov.angle_down + { + // It's fine if this happens occasionally, but if this happening very + // often we should stop caching + warn!("FOV changed, updating projection matrices"); + self.view.fov = view.fov; + self.recompute_projection(clip_planes); + } + } + + fn recompute_projection(&mut self, clip_planes: ClipPlanes) { + self.cached_projection = fov_to_projection_matrix(&self.view.fov, clip_planes); + } + + fn view(&self) -> View { + View { + transform: transform(&self.view.pose), + projection: self.cached_projection, + } + } +} + +pub struct OpenXrDiscovery { + context_menu_provider: Option>, + app_info: AppInfo, +} + +impl OpenXrDiscovery { + pub fn new( + context_menu_provider: Option>, + app_info: AppInfo, + ) -> Self { + Self { + context_menu_provider, + app_info, + } + } +} + +pub struct CreatedInstance { + instance: Instance, + supports_hands: bool, + supports_secondary: bool, + system: SystemId, + supports_mutable_fov: bool, + supported_interaction_profiles: Vec<&'static str>, + supports_passthrough: bool, + supports_updating_framerate: bool, +} + +pub fn create_instance( + needs_hands: bool, + needs_secondary: bool, + needs_passthrough: bool, + app_info: &AppInfo, +) -> Result { + let entry = unsafe { Entry::load().map_err(|e| format!("Entry::load {:?}", e))? 
}; + let supported = entry + .enumerate_extensions() + .map_err(|e| format!("Entry::enumerate_extensions {:?}", e))?; + warn!("Available extensions:\n{:?}", supported); + let mut supports_hands = needs_hands && supported.ext_hand_tracking; + let supports_passthrough = needs_passthrough && supported.fb_passthrough; + let supports_secondary = needs_secondary + && supported.msft_secondary_view_configuration + && supported.msft_first_person_observer; + let supports_updating_framerate = supported.fb_display_refresh_rate; + + let app_info = ApplicationInfo { + application_name: &app_info.application_name, + application_version: app_info.application_version, + engine_name: &app_info.engine_name, + engine_version: app_info.engine_version, + api_version: Version::new(1, 0, 36), + }; + + let mut exts = ExtensionSet::default(); + GraphicsProvider::enable_graphics_extensions(&mut exts); + if supports_hands { + exts.ext_hand_tracking = true; + } + + if supports_secondary { + exts.msft_secondary_view_configuration = true; + exts.msft_first_person_observer = true; + } + + if supports_passthrough { + exts.fb_passthrough = true; + } + + if supports_updating_framerate { + exts.fb_display_refresh_rate = true; + } + + let supported_interaction_profiles = get_supported_interaction_profiles(&supported, &mut exts); + + let instance = entry + .create_instance(&app_info, &exts, &[]) + .map_err(|e| format!("Entry::create_instance {:?}", e))?; + let system = instance + .system(FormFactor::HEAD_MOUNTED_DISPLAY) + .map_err(|e| format!("Instance::system {:?}", e))?; + + if supports_hands { + supports_hands |= instance + .supports_hand_tracking(system) + .map_err(|e| format!("Instance::supports_hand_tracking {:?}", e))?; + } + + let supports_mutable_fov = { + let properties = instance + .view_configuration_properties(system, ViewConfigurationType::PRIMARY_STEREO) + .map_err(|e| format!("Instance::view_configuration_properties {:?}", e))?; + // Unfortunately we need to do a platform check here as just flipping the FOVs for the + // composition layer is seemingly no longer sufficient. As long as windows sessions are + // solely backed by D3D11, we'll need to apply the same inverted view + reversed winding + // fix for SteamVR as well as Oculus. 
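+ // In other words, fov_mutable is only honoured on non-Windows targets; the
+ // Windows/D3D11 path always takes the inverted-FOV and reversed-winding fix instead.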
+ properties.fov_mutable && !cfg!(target_os = "windows") + }; + + Ok(CreatedInstance { + instance, + supports_hands, + supports_secondary, + system, + supports_mutable_fov, + supported_interaction_profiles, + supports_passthrough, + supports_updating_framerate, + }) +} + +impl DiscoveryAPI for OpenXrDiscovery { + fn request_session( + &mut self, + mode: SessionMode, + init: &SessionInit, + xr: SessionBuilder, + ) -> Result { + if self.supports_session(mode) { + let needs_hands = init.feature_requested("hand-tracking"); + let needs_secondary = + init.feature_requested("secondary-views") && init.first_person_observer_view; + let needs_passthrough = mode == SessionMode::ImmersiveAR; + let instance = create_instance( + needs_hands, + needs_secondary, + needs_passthrough, + &self.app_info, + ) + .map_err(|e| Error::BackendSpecific(e))?; + + let mut supported_features = vec!["local-floor".into(), "bounded-floor".into()]; + if instance.supports_hands { + supported_features.push("hand-tracking".into()); + } + if instance.supports_secondary && init.first_person_observer_view { + supported_features.push("secondary-views".into()); + } + let granted_features = init.validate(mode, &supported_features)?; + let context_menu_provider = self.context_menu_provider.take(); + xr.spawn(move |grand_manager| { + OpenXrDevice::new( + instance, + granted_features, + context_menu_provider, + grand_manager, + ) + }) + } else { + Err(Error::NoMatchingDevice) + } + } + + fn supports_session(&self, mode: SessionMode) -> bool { + let mut supports = false; + // Determining AR support requires enumerating environment blend modes, + // but this requires an already created XrInstance and SystemId. + // We'll make a "default" instance here to check the blend modes, + // then a proper one in request_session with hands/secondary support if needed. + let needs_passthrough = mode == SessionMode::ImmersiveAR; + if let Ok(instance) = create_instance(false, false, needs_passthrough, &self.app_info) { + if let Ok(blend_modes) = instance.instance.enumerate_environment_blend_modes( + instance.system, + ViewConfigurationType::PRIMARY_STEREO, + ) { + if mode == SessionMode::ImmersiveAR { + supports = blend_modes.contains(&EnvironmentBlendMode::ADDITIVE) + || blend_modes.contains(&EnvironmentBlendMode::ALPHA_BLEND) + || instance.supports_passthrough; + } else if mode == SessionMode::ImmersiveVR { + // Immersive VR sessions are not precluded by non-opaque blending + supports = blend_modes.len() > 0; + } + } + } + supports + } +} + +struct OpenXrDevice { + session: Arc>, + instance: Instance, + events: EventBuffer, + frame_waiter: FrameWaiter, + layer_manager: LayerManager, + viewer_space: Space, + shared_data: Arc>>, + clip_planes: ClipPlanes, + supports_secondary: bool, + supports_mutable_fov: bool, + supports_updating_framerate: bool, + + // input + action_set: ActionSet, + right_hand: OpenXRInput, + left_hand: OpenXRInput, + granted_features: Vec, + context_menu_provider: Option>, + context_menu_future: Option>, +} + +/// Data that is shared between the openxr thread and the +/// layer manager that runs in the webgl thread. 
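+/// It lives behind an `Arc<Mutex<Option<SharedData>>>`; the layer manager locks it
+/// each frame to read the current views, frame state and environment blend modes.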
+struct SharedData { + left: ViewInfo, + right: ViewInfo, + secondary: Option>, + secondary_active: bool, + primary_blend_mode: EnvironmentBlendMode, + secondary_blend_mode: Option, + frame_state: Option, + space: Space, + swapchain_sample_count: u32, +} + +struct OpenXrLayerManager { + session: Arc>, + shared_data: Arc>>, + frame_stream: FrameStream, + layers: Vec<(ContextId, LayerId)>, + openxr_layers: HashMap, + clearer: GlClearer, + _passthrough: Option, + passthrough_layer: Option, +} + +struct OpenXrLayer { + swapchain: Swapchain, + depth_stencil_texture: Option, + size: Size2D, + images: Vec<::SwapchainImage>, + surface_textures: Vec>, + waited: bool, +} + +impl OpenXrLayerManager { + fn new( + session: Arc>, + shared_data: Arc>>, + frame_stream: FrameStream, + should_reverse_winding: bool, + _passthrough: Option, + passthrough_layer: Option, + ) -> OpenXrLayerManager { + let layers = Vec::new(); + let openxr_layers = HashMap::new(); + let clearer = GlClearer::new(should_reverse_winding); + OpenXrLayerManager { + session, + shared_data, + frame_stream, + layers, + openxr_layers, + clearer, + _passthrough, + passthrough_layer, + } + } +} + +impl OpenXrLayer { + fn new( + swapchain: Swapchain, + depth_stencil_texture: Option, + size: Size2D, + ) -> Result { + let images = swapchain + .enumerate_images() + .map_err(|e| Error::BackendSpecific(format!("Session::enumerate_images {:?}", e)))?; + let waited = false; + let mut surface_textures = Vec::new(); + surface_textures.resize_with(images.len(), || None); + Ok(OpenXrLayer { + swapchain, + depth_stencil_texture, + size, + images, + surface_textures, + waited, + }) + } + + fn get_surface_texture( + &mut self, + device: &mut SurfmanDevice, + context: &mut SurfmanContext, + index: usize, + ) -> Result<&SurfaceTexture, SurfmanError> { + let result = self + .surface_textures + .get_mut(index) + .ok_or(SurfmanError::Failed)?; + if let Some(result) = result { + return Ok(result); + } + let surface_texture = GraphicsProvider::surface_texture_from_swapchain_texture( + self.images[index], + device, + context, + &self.size.to_untyped(), + )?; + *result = Some(surface_texture); + result.as_ref().ok_or(SurfmanError::Failed) + } +} + +impl LayerManagerAPI for OpenXrLayerManager { + fn create_layer( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + init: LayerInit, + ) -> Result { + let guard = self.shared_data.lock().unwrap(); + let data = guard.as_ref().unwrap(); + + // XXXManishearth should we be doing this, or letting Servo set the format? + let formats = self.session.enumerate_swapchain_formats().map_err(|e| { + Error::BackendSpecific(format!("Session::enumerate_swapchain_formats {:?}", e)) + })?; + let format = GraphicsProvider::pick_format(&formats); + let texture_size = init.texture_size(&data.viewports()); + let sample_count = data.swapchain_sample_count; + let swapchain_create_info = SwapchainCreateInfo { + create_flags: SwapchainCreateFlags::EMPTY, + usage_flags: SwapchainUsageFlags::COLOR_ATTACHMENT | SwapchainUsageFlags::SAMPLED, + width: texture_size.width as u32, + height: texture_size.height as u32, + format, + sample_count, + face_count: 1, + array_size: 1, + mip_count: 1, + }; + let swapchain = self + .session + .create_swapchain(&swapchain_create_info) + .map_err(|e| Error::BackendSpecific(format!("Session::create_swapchain {:?}", e)))?; + + // TODO: Treat depth and stencil separately? + // TODO: Use the openxr API for depth/stencil swap chains? 
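+ // For now a plain GL DEPTH24_STENCIL8 texture is allocated per layer below,
+ // rather than a dedicated OpenXR depth/stencil swapchain.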
+ let has_depth_stencil = match init { + LayerInit::WebGLLayer { stencil, depth, .. } => stencil | depth, + LayerInit::ProjectionLayer { stencil, depth, .. } => stencil | depth, + }; + let depth_stencil_texture = if has_depth_stencil { + let gl = contexts + .bindings(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + unsafe { + let depth_stencil_texture = gl.create_texture().ok(); + gl.bind_texture(gl::TEXTURE_2D, depth_stencil_texture); + gl.tex_image_2d( + gl::TEXTURE_2D, + 0, + gl::DEPTH24_STENCIL8 as _, + texture_size.width, + texture_size.height, + 0, + gl::DEPTH_STENCIL, + gl::UNSIGNED_INT_24_8, + PixelUnpackData::Slice(None), + ); + depth_stencil_texture + } + } else { + None + }; + + let layer_id = LayerId::new(); + let openxr_layer = OpenXrLayer::new(swapchain, depth_stencil_texture, texture_size)?; + self.layers.push((context_id, layer_id)); + self.openxr_layers.insert(layer_id, openxr_layer); + Ok(layer_id) + } + + fn destroy_layer( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + layer_id: LayerId, + ) { + self.clearer + .destroy_layer(device, contexts, context_id, layer_id); + self.layers.retain(|&ids| ids != (context_id, layer_id)); + if let Some(mut layer) = self.openxr_layers.remove(&layer_id) { + if let Some(depth_stencil_texture) = layer.depth_stencil_texture { + let gl = contexts.bindings(device, context_id).unwrap(); + unsafe { gl.delete_texture(depth_stencil_texture) }; + } + let mut context = contexts + .context(device, context_id) + .expect("missing GL context"); + for surface_texture in mem::replace(&mut layer.surface_textures, vec![]) { + if let Some(surface_texture) = surface_texture { + let mut surface = device + .destroy_surface_texture(&mut context, surface_texture) + .unwrap(); + device.destroy_surface(&mut context, &mut surface).unwrap(); + } + } + } + } + + fn layers(&self) -> &[(ContextId, LayerId)] { + &self.layers[..] + } + + fn end_frame( + &mut self, + _device: &mut SurfmanDevice, + _contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result<(), Error> { + let guard = self.shared_data.lock().unwrap(); + let data = guard.as_ref().unwrap(); + + // At this point the frame contents have been rendered, so we can release access to the texture + // in preparation for displaying it. + for (_, openxr_layer) in &mut self.openxr_layers { + if openxr_layer.waited { + openxr_layer.swapchain.release_image().map_err(|e| { + Error::BackendSpecific(format!("Session::release_image {:?}", e)) + })?; + openxr_layer.waited = false; + } + } + + let openxr_layers = &self.openxr_layers; + + // Invert the up/down angles so that openxr flips the texture in the y axis. + // Additionally, swap between the L/R views to compensate for inverted up/down FOVs. 
+ // This has no effect in runtimes that don't support fovMutable + let mut l_fov = data.left.view.fov; + let mut r_fov = data.right.view.fov; + if cfg!(target_os = "windows") { + std::mem::swap(&mut l_fov.angle_up, &mut r_fov.angle_down); + std::mem::swap(&mut r_fov.angle_up, &mut l_fov.angle_down); + } + + let viewports = data.viewports(); + let primary_views = layers + .iter() + .filter_map(|&(_, layer_id)| { + let openxr_layer = openxr_layers.get(&layer_id)?; + Some([ + openxr::CompositionLayerProjectionView::new() + .pose(data.left.view.pose) + .fov(l_fov) + .sub_image( + openxr::SwapchainSubImage::new() + .swapchain(&openxr_layer.swapchain) + .image_array_index(0) + .image_rect(image_rect(viewports.viewports[0])), + ), + openxr::CompositionLayerProjectionView::new() + .pose(data.right.view.pose) + .fov(r_fov) + .sub_image( + openxr::SwapchainSubImage::new() + .swapchain(&openxr_layer.swapchain) + .image_array_index(0) + .image_rect(image_rect(viewports.viewports[1])), + ), + ]) + }) + .collect::>(); + + let primary_layers = primary_views + .iter() + .map(|views| { + CompositionLayerProjection::new() + .space(&data.space) + .layer_flags(CompositionLayerFlags::BLEND_TEXTURE_SOURCE_ALPHA) + .views(&views[..]) + }) + .collect::>(); + + let mut primary_layers = primary_layers + .iter() + .map(|layer| layer.deref()) + .collect::>(); + + if let Some(passthrough_layer) = &self.passthrough_layer { + let clp = CompositionLayerPassthroughFB { + ty: CompositionLayerPassthroughFB::TYPE, + next: std::ptr::null(), + flags: CompositionLayerFlags::BLEND_TEXTURE_SOURCE_ALPHA, + space: openxr::sys::Space::from_raw(0), + layer_handle: *passthrough_layer.inner(), + }; + let passthrough_base = &clp as *const _ as *const CompositionLayerBase; + unsafe { + primary_layers.insert(0, &*passthrough_base); + } + } + + if let (Some(secondary), true) = (data.secondary.as_ref(), data.secondary_active) { + let mut s_fov = secondary.view.fov; + std::mem::swap(&mut s_fov.angle_up, &mut s_fov.angle_down); + let secondary_views = layers + .iter() + .filter_map(|&(_, layer_id)| { + let openxr_layer = openxr_layers.get(&layer_id)?; + Some([openxr::CompositionLayerProjectionView::new() + .pose(secondary.view.pose) + .fov(s_fov) + .sub_image( + openxr::SwapchainSubImage::new() + .swapchain(&openxr_layer.swapchain) + .image_array_index(0) + .image_rect(image_rect(viewports.viewports[2])), + )]) + }) + .collect::>(); + + let secondary_layers = secondary_views + .iter() + .map(|views| { + CompositionLayerProjection::new() + .space(&data.space) + .layer_flags(CompositionLayerFlags::BLEND_TEXTURE_SOURCE_ALPHA) + .views(&views[..]) + }) + .collect::>(); + + let secondary_layers = secondary_layers + .iter() + .map(|layer| layer.deref()) + .collect::>(); + + self.frame_stream + .end_secondary( + data.frame_state.as_ref().unwrap().predicted_display_time, + data.primary_blend_mode, + &primary_layers[..], + SecondaryEndInfo { + ty: ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT, + // XXXManishearth should we use the secondary layer's blend mode here, given + // that the content will be using the primary blend mode? 
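+                        // For now, the blend mode reported for the secondary view
+                        // configuration is used when the runtime provides one, with
+                        // the primary blend mode as the fallback.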
+ environment_blend_mode: data + .secondary_blend_mode + .unwrap_or(data.primary_blend_mode), + layers: &secondary_layers[..], + }, + ) + .map_err(|e| { + Error::BackendSpecific(format!("FrameStream::end_secondary {:?}", e)) + })?; + } else { + self.frame_stream + .end( + data.frame_state.as_ref().unwrap().predicted_display_time, + data.primary_blend_mode, + &primary_layers[..], + ) + .map_err(|e| Error::BackendSpecific(format!("FrameStream::end {:?}", e)))?; + } + Ok(()) + } + + fn begin_frame( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result, Error> { + let data_guard = self.shared_data.lock().unwrap(); + let data = data_guard.as_ref().unwrap(); + let openxr_layers = &mut self.openxr_layers; + let clearer = &mut self.clearer; + self.frame_stream + .begin() + .map_err(|e| Error::BackendSpecific(format!("FrameStream::begin {:?}", e)))?; + layers + .iter() + .map(|&(context_id, layer_id)| { + let context = contexts + .context(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + let openxr_layer = openxr_layers + .get_mut(&layer_id) + .ok_or(Error::NoMatchingDevice)?; + + let image = openxr_layer.swapchain.acquire_image().map_err(|e| { + Error::BackendSpecific(format!("Swapchain::acquire_image {:?}", e)) + })?; + openxr_layer + .swapchain + .wait_image(openxr::Duration::INFINITE) + .map_err(|e| { + Error::BackendSpecific(format!("Swapchain::wait_image {:?}", e)) + })?; + openxr_layer.waited = true; + + let color_surface_texture = openxr_layer + .get_surface_texture(device, context, image as usize) + .map_err(|e| { + Error::BackendSpecific(format!("Layer::get_surface_texture {:?}", e)) + })?; + let color_texture = device.surface_texture_object(color_surface_texture); + let color_target = device.surface_gl_texture_target(); + let depth_stencil_texture = openxr_layer + .depth_stencil_texture + .map(|texture| texture.0.get()); + let texture_array_index = None; + let origin = Point2D::new(0, 0); + let texture_size = openxr_layer.size; + let sub_image = Some(SubImage { + color_texture, + depth_stencil_texture, + texture_array_index, + viewport: Rect::new(origin, texture_size), + }); + let view_sub_images = data + .viewports() + .viewports + .iter() + .map(|&viewport| SubImage { + color_texture, + depth_stencil_texture, + texture_array_index, + viewport, + }) + .collect(); + clearer.clear( + device, + contexts, + context_id, + layer_id, + NonZeroU32::new(color_texture).map(glow::NativeTexture), + color_target, + openxr_layer.depth_stencil_texture, + ); + Ok(SubImages { + layer_id, + sub_image, + view_sub_images, + }) + }) + .collect() + } +} + +fn image_rect(viewport: Rect) -> openxr::Rect2Di { + openxr::Rect2Di { + extent: openxr::Extent2Di { + height: viewport.size.height, + width: viewport.size.width, + }, + offset: openxr::Offset2Di { + x: viewport.origin.x, + y: viewport.origin.y, + }, + } +} + +impl OpenXrDevice { + fn new( + instance: CreatedInstance, + granted_features: Vec, + context_menu_provider: Option>, + grand_manager: LayerGrandManager, + ) -> Result { + let CreatedInstance { + instance, + supports_hands, + supports_secondary, + system, + supports_mutable_fov, + supported_interaction_profiles, + supports_passthrough, + supports_updating_framerate, + } = instance; + + let (init_tx, init_rx) = crossbeam_channel::unbounded(); + + let instance_clone = instance.clone(); + let shared_data = Arc::new(Mutex::new(None)); + let shared_data_clone = shared_data.clone(); + let mut data = shared_data.lock().unwrap(); 
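+        // The shared data mutex is locked here and only released (via `drop(data)`
+        // below) once the views, reference space and blend modes have been filled in,
+        // so the layer manager cannot observe a half-initialised state. The closure
+        // passed to `create_layer_manager` hands the OpenXR session and frame waiter
+        // back over the `init_tx`/`init_rx` channel.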
+ + let layer_manager = grand_manager.create_layer_manager(move |device, _| { + let (session, frame_waiter, frame_stream) = + GraphicsProvider::create_session(device, &instance_clone, system)?; + let (passthrough, passthrough_layer) = if supports_passthrough { + let flags = PassthroughFlagsFB::IS_RUNNING_AT_CREATION; + let purpose = PassthroughLayerPurposeFB::RECONSTRUCTION; + let passthrough = session + .create_passthrough(flags) + .expect("Unable to initialize passthrough"); + let passthrough_layer = session + .create_passthrough_layer(&passthrough, flags, purpose) + .expect("Failed to create passthrough layer"); + (Some(passthrough), Some(passthrough_layer)) + } else { + (None, None) + }; + let session = Arc::new(session); + init_tx + .send((session.clone(), frame_waiter)) + .map_err(|_| Error::CommunicationError)?; + Ok(OpenXrLayerManager::new( + session, + shared_data_clone, + frame_stream, + !supports_mutable_fov, + passthrough, + passthrough_layer, + )) + })?; + + let (session, frame_waiter) = init_rx.recv().map_err(|_| Error::CommunicationError)?; + + // XXXPaul initialisation should happen on SessionStateChanged(Ready)? + + if supports_secondary { + session + .begin_with_secondary( + ViewConfigurationType::PRIMARY_STEREO, + &[ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT], + ) + .map_err(|e| { + Error::BackendSpecific(format!("Session::begin_with_secondary {:?}", e)) + })?; + } else { + session + .begin(ViewConfigurationType::PRIMARY_STEREO) + .map_err(|e| Error::BackendSpecific(format!("Session::begin {:?}", e)))?; + } + + let pose = Posef { + orientation: Quaternionf { + x: 0., + y: 0., + z: 0., + w: 1., + }, + position: Vector3f { + x: 0., + y: 0., + z: 0., + }, + }; + let space = session + .create_reference_space(ReferenceSpaceType::LOCAL, pose) + .map_err(|e| { + Error::BackendSpecific(format!("Session::create_reference_space {:?}", e)) + })?; + + let viewer_space = session + .create_reference_space(ReferenceSpaceType::VIEW, pose) + .map_err(|e| { + Error::BackendSpecific(format!("Session::create_reference_space {:?}", e)) + })?; + + let view_configuration_type = ViewConfigurationType::PRIMARY_STEREO; + let view_configurations = instance + .enumerate_view_configuration_views(system, view_configuration_type) + .map_err(|e| { + Error::BackendSpecific(format!( + "Session::enumerate_view_configuration_views {:?}", + e + )) + })?; + + let left_view_configuration = view_configurations[0]; + let right_view_configuration = view_configurations[1]; + let left_extent = Extent2Di { + width: left_view_configuration.recommended_image_rect_width as i32, + height: left_view_configuration.recommended_image_rect_height as i32, + }; + let right_extent = Extent2Di { + width: right_view_configuration.recommended_image_rect_width as i32, + height: right_view_configuration.recommended_image_rect_height as i32, + }; + + assert_eq!( + left_view_configuration.recommended_image_rect_height, + right_view_configuration.recommended_image_rect_height, + ); + + let swapchain_sample_count = left_view_configuration.recommended_swapchain_sample_count; + + let secondary_active = false; + let (secondary, secondary_blend_mode) = if supports_secondary { + let view_configuration = *instance + .enumerate_view_configuration_views( + system, + ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT, + ) + .map_err(|e| { + Error::BackendSpecific(format!( + "Session::enumerate_view_configuration_views {:?}", + e + )) + })? 
+ .get(0) + .expect( + "Session::enumerate_view_configuration_views() returned no secondary views", + ); + + let secondary_blend_mode = instance + .enumerate_environment_blend_modes( + system, + ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT, + ) + .map_err(|e| { + Error::BackendSpecific(format!( + "Instance::enumerate_environment_blend_modes {:?}", + e + )) + })?[0]; + + let secondary_extent = Extent2Di { + width: view_configuration.recommended_image_rect_width as i32, + height: view_configuration.recommended_image_rect_height as i32, + }; + + let secondary = ViewInfo { + view: VIEW_INIT, + extent: secondary_extent, + cached_projection: Transform3D::identity(), + }; + + (Some(secondary), Some(secondary_blend_mode)) + } else { + (None, None) + }; + + let primary_blend_mode = instance + .enumerate_environment_blend_modes(system, view_configuration_type) + .map_err(|e| { + Error::BackendSpecific(format!( + "Instance::enumerate_environment_blend_modes {:?}", + e + )) + })?[0]; + + let left = ViewInfo { + view: VIEW_INIT, + extent: left_extent, + cached_projection: Transform3D::identity(), + }; + let right = ViewInfo { + view: VIEW_INIT, + extent: right_extent, + cached_projection: Transform3D::identity(), + }; + *data = Some(SharedData { + frame_state: None, + space, + left, + right, + secondary, + secondary_active, + primary_blend_mode, + secondary_blend_mode, + swapchain_sample_count, + }); + drop(data); + + let (action_set, right_hand, left_hand) = OpenXRInput::setup_inputs( + &instance, + &session, + supports_hands, + supported_interaction_profiles, + ); + + Ok(OpenXrDevice { + instance, + events: Default::default(), + session, + frame_waiter, + viewer_space, + clip_planes: Default::default(), + supports_secondary, + supports_mutable_fov, + supports_updating_framerate, + layer_manager, + shared_data, + + action_set, + right_hand, + left_hand, + granted_features, + context_menu_provider, + context_menu_future: None, + }) + } + + fn handle_openxr_events(&mut self) -> bool { + use openxr::Event::*; + let mut stopped = false; + loop { + let mut buffer = openxr::EventDataBuffer::new(); + let event = match self.instance.poll_event(&mut buffer) { + Ok(event) => event, + Err(e) => { + error!("Error polling events: {:?}", e); + return false; + } + }; + match event { + Some(SessionStateChanged(session_change)) => match session_change.state() { + openxr::SessionState::EXITING | openxr::SessionState::LOSS_PENDING => { + self.events.callback(Event::SessionEnd); + return false; + } + openxr::SessionState::STOPPING => { + self.events + .callback(Event::VisibilityChange(Visibility::Hidden)); + if let Err(e) = self.session.end() { + error!("Session failed to end on STOPPING: {:?}", e); + } + stopped = true; + } + openxr::SessionState::READY if stopped => { + self.events + .callback(Event::VisibilityChange(Visibility::Visible)); + if let Err(e) = self.session.begin(ViewConfigurationType::PRIMARY_STEREO) { + error!("Session failed to begin on READY: {:?}", e); + } + stopped = false; + } + openxr::SessionState::FOCUSED => { + self.events + .callback(Event::VisibilityChange(Visibility::Visible)); + } + openxr::SessionState::VISIBLE => { + self.events + .callback(Event::VisibilityChange(Visibility::VisibleBlurred)); + } + _ => { + // FIXME: Handle other states + } + }, + Some(InstanceLossPending(_)) => { + self.events.callback(Event::SessionEnd); + return false; + } + Some(InteractionProfileChanged(_)) => { + let path = self.instance.string_to_path("/user/hand/right").unwrap(); + let 
profile_path = self.session.current_interaction_profile(path).unwrap(); + let profile = self.instance.path_to_string(profile_path); + + match profile { + Ok(profile) => { + let profiles = get_profiles_from_path(profile) + .iter() + .map(|s| s.to_string()) + .collect(); + + let mut new_left = self.left_hand.input_source(); + new_left.profiles.clone_from(&profiles); + self.events + .callback(Event::UpdateInput(new_left.id, new_left)); + + let mut new_right = self.right_hand.input_source(); + new_right.profiles.clone_from(&profiles); + self.events + .callback(Event::UpdateInput(new_right.id, new_right)); + } + Err(e) => { + error!("Failed to get interaction profile: {:?}", e); + } + } + } + Some(ReferenceSpaceChangePending(e)) => { + let base_space = match e.reference_space_type() { + ReferenceSpaceType::VIEW => BaseSpace::Viewer, + ReferenceSpaceType::LOCAL => BaseSpace::Local, + ReferenceSpaceType::LOCAL_FLOOR => BaseSpace::Floor, + ReferenceSpaceType::STAGE => BaseSpace::BoundedFloor, + _ => unreachable!( + "Should not be receiving change events for unsupported space types" + ), + }; + let transform = transform(&e.pose_in_previous_space()); + self.events + .callback(Event::ReferenceSpaceChanged(base_space, transform)); + } + Some(_) => { + // FIXME: Handle other events + } + None if stopped => { + // XXXManishearth be able to handle exits during this time + thread::sleep(Duration::from_millis(200)); + } + None => { + // No more events to process + break; + } + } + } + true + } +} + +impl SharedData { + fn views(&self) -> Views { + let left_view = self.left.view(); + let right_view = self.right.view(); + if let (Some(secondary), true) = (self.secondary.as_ref(), self.secondary_active) { + // Note: we report the secondary view only when it is active + let third_eye = secondary.view(); + return Views::StereoCapture(left_view, right_view, third_eye); + } + Views::Stereo(left_view, right_view) + } + + fn viewports(&self) -> Viewports { + let left_vp = Rect::new( + Point2D::zero(), + Size2D::new(self.left.extent.width, self.left.extent.height), + ); + let right_vp = Rect::new( + Point2D::new(self.left.extent.width, 0), + Size2D::new(self.right.extent.width, self.right.extent.height), + ); + let mut viewports = vec![left_vp, right_vp]; + // Note: we report the secondary viewport even when it is inactive + if let Some(ref secondary) = self.secondary { + let secondary_vp = Rect::new( + Point2D::new(self.left.extent.width + self.right.extent.width, 0), + Size2D::new(secondary.extent.width, secondary.extent.height) + / SECONDARY_VIEW_DOWNSCALE, + ); + viewports.push(secondary_vp) + } + Viewports { viewports } + } +} + +impl DeviceAPI for OpenXrDevice { + fn floor_transform(&self) -> Option> { + let translation = Vector3D::new(0.0, HEIGHT, 0.0); + Some(RigidTransform3D::from_translation(translation)) + } + + fn viewports(&self) -> Viewports { + self.shared_data + .lock() + .unwrap() + .as_ref() + .unwrap() + .viewports() + } + + fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result { + self.layer_manager.create_layer(context_id, init) + } + + fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) { + self.layer_manager.destroy_layer(context_id, layer_id) + } + + fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option { + if !self.handle_openxr_events() { + warn!("no frame, session isn't running"); + // Session is not running anymore. 
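+            // handle_openxr_events() returns false when the session has ended, the
+            // instance is being lost, or event polling fails, so there is no frame
+            // to produce.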
+ return None; + } + if let Some(ref context_menu_future) = self.context_menu_future { + match context_menu_future.poll() { + ContextMenuResult::ExitSession => { + self.quit(); + return None; + } + ContextMenuResult::Dismissed => self.context_menu_future = None, + ContextMenuResult::Pending => (), + } + } + + let (frame_state, secondary_state) = if self.supports_secondary { + let (frame_state, secondary_state) = match self.frame_waiter.wait_secondary() { + Ok(frame_state) => frame_state, + Err(e) => { + error!("Error waiting on frame: {:?}", e); + return None; + } + }; + + assert_eq!( + secondary_state.ty, + ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT + ); + (frame_state, Some(secondary_state)) + } else { + match self.frame_waiter.wait() { + Ok(frame_state) => (frame_state, None), + Err(e) => { + error!("Error waiting on frame: {:?}", e); + return None; + } + } + }; + + // We get the subimages before grabbing the lock, + // since otherwise we'll deadlock + let sub_images = self.layer_manager.begin_frame(layers).ok()?; + + let mut guard = self.shared_data.lock().unwrap(); + let data = guard.as_mut().unwrap(); + + // XXXManishearth should we check frame_state.should_render? + let (_view_flags, mut views) = match self.session.locate_views( + ViewConfigurationType::PRIMARY_STEREO, + frame_state.predicted_display_time, + &data.space, + ) { + Ok(data) => data, + Err(e) => { + error!("Error locating views: {:?}", e); + return None; + } + }; + if !self.supports_mutable_fov { + views.iter_mut().for_each(|v| { + std::mem::swap(&mut v.fov.angle_up, &mut v.fov.angle_down); + }); + } + data.left.set_view(views[0], self.clip_planes); + data.right.set_view(views[1], self.clip_planes); + let pose = match self + .viewer_space + .locate(&data.space, frame_state.predicted_display_time) + { + Ok(pose) => pose, + Err(e) => { + error!("Error locating viewer space: {:?}", e); + return None; + } + }; + let transform = transform(&pose.pose); + + if let Some(secondary_state) = secondary_state.as_ref() { + data.secondary_active = secondary_state.active; + } + if let (Some(secondary), true) = (data.secondary.as_mut(), data.secondary_active) { + let view = match self.session.locate_views( + ViewConfigurationType::SECONDARY_MONO_FIRST_PERSON_OBSERVER_MSFT, + frame_state.predicted_display_time, + &data.space, + ) { + Ok(v) => v.1[0], + Err(e) => { + error!("Error locating views: {:?}", e); + return None; + } + }; + secondary.set_view(view, self.clip_planes); + } + + let active_action_set = ActiveActionSet::new(&self.action_set); + + if let Err(e) = self.session.sync_actions(&[active_action_set]) { + error!("Error syncing actions: {:?}", e); + return None; + } + + let mut right = self + .right_hand + .frame(&self.session, &frame_state, &data.space, &transform); + let mut left = self + .left_hand + .frame(&self.session, &frame_state, &data.space, &transform); + + data.frame_state = Some(frame_state); + let views = data.views(); + + if let Some(ref context_menu_provider) = self.context_menu_provider { + if (left.menu_selected || right.menu_selected) && self.context_menu_future.is_none() { + self.context_menu_future = Some(context_menu_provider.open_context_menu()); + } else if self.context_menu_future.is_some() { + // Do not surface input info whilst the context menu is open + // We don't do this for the first frame after the context menu is opened + // so that the appropriate select cancel events may fire + right.frame.target_ray_origin = None; + right.frame.grip_origin = None; + 
left.frame.target_ray_origin = None; + left.frame.grip_origin = None; + right.select = None; + right.squeeze = None; + left.select = None; + left.squeeze = None; + } + } + + let left_input_changed = left.frame.input_changed; + let right_input_changed = right.frame.input_changed; + + let frame = Frame { + pose: Some(ViewerPose { transform, views }), + inputs: vec![right.frame, left.frame], + events: vec![], + sub_images, + hit_test_results: vec![], + predicted_display_time: frame_state.predicted_display_time.as_nanos() as f64, + }; + + if let Some(right_select) = right.select { + self.events.callback(Event::Select( + InputId(0), + SelectKind::Select, + right_select, + frame.clone(), + )); + } + if let Some(right_squeeze) = right.squeeze { + self.events.callback(Event::Select( + InputId(0), + SelectKind::Squeeze, + right_squeeze, + frame.clone(), + )); + } + if let Some(left_select) = left.select { + self.events.callback(Event::Select( + InputId(1), + SelectKind::Select, + left_select, + frame.clone(), + )); + } + if let Some(left_squeeze) = left.squeeze { + self.events.callback(Event::Select( + InputId(1), + SelectKind::Squeeze, + left_squeeze, + frame.clone(), + )); + } + if left_input_changed { + self.events + .callback(Event::InputChanged(InputId(1), frame.inputs[1].clone())) + } + if right_input_changed { + self.events + .callback(Event::InputChanged(InputId(0), frame.inputs[0].clone())) + } + Some(frame) + } + + fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) { + // We tell OpenXR to display the frame in the layer manager. + // Due to threading issues we can't call D3D11 APIs on the openxr thread as the + // WebGL thread might be using the device simultaneously, so this method delegates + // everything to the layer manager. + let _ = self.layer_manager.end_frame(layers); + } + + fn initial_inputs(&self) -> Vec { + vec![ + self.right_hand.input_source(), + self.left_hand.input_source(), + ] + } + + fn set_event_dest(&mut self, dest: Sender) { + self.events.upgrade(dest) + } + + fn quit(&mut self) { + self.session.request_exit().unwrap(); + loop { + let mut buffer = openxr::EventDataBuffer::new(); + let event = match self.instance.poll_event(&mut buffer) { + Ok(e) => e, + Err(e) => { + error!("Error polling for event while quitting: {:?}", e); + break; + } + }; + match event { + Some(openxr::Event::SessionStateChanged(session_change)) => { + match session_change.state() { + openxr::SessionState::EXITING => { + break; + } + openxr::SessionState::STOPPING => { + if let Err(e) = self.session.end() { + error!("Session failed to end while STOPPING: {:?}", e); + } + } + _ => (), + } + } + _ => (), + } + thread::sleep(Duration::from_millis(30)); + } + self.events.callback(Event::SessionEnd); + // We clear this data to remove the outstanding reference to XrSpace, + // which keeps other OpenXR objects alive. + *self.shared_data.lock().unwrap() = None; + } + + fn set_quitter(&mut self, _: Quitter) { + // the quitter is only needed if we have anything from outside the render + // thread that can signal a quit. We don't. 
+ } + + fn update_clip_planes(&mut self, near: f32, far: f32) { + self.clip_planes.update(near, far); + } + + fn environment_blend_mode(&self) -> webxr_api::EnvironmentBlendMode { + match self + .shared_data + .lock() + .unwrap() + .as_ref() + .unwrap() + .primary_blend_mode + { + EnvironmentBlendMode::OPAQUE => webxr_api::EnvironmentBlendMode::Opaque, + EnvironmentBlendMode::ALPHA_BLEND => webxr_api::EnvironmentBlendMode::AlphaBlend, + EnvironmentBlendMode::ADDITIVE => webxr_api::EnvironmentBlendMode::Additive, + v => unimplemented!("unsupported blend mode: {:?}", v), + } + } + + fn granted_features(&self) -> &[String] { + &self.granted_features + } + + fn update_frame_rate(&mut self, rate: f32) -> f32 { + if self.supports_updating_framerate { + self.session + .request_display_refresh_rate(rate) + .expect("Failed to request display refresh rate"); + self.session + .get_display_refresh_rate() + .expect("Failed to get display refresh rate") + } else { + -1.0 + } + } + + fn supported_frame_rates(&self) -> Vec { + if self.supports_updating_framerate { + self.session + .enumerate_display_refresh_rates() + .expect("Failed to enumerate display refresh rates") + } else { + vec![] + } + } + + fn reference_space_bounds(&self) -> Option>> { + match self + .session + .reference_space_bounds_rect(ReferenceSpaceType::STAGE) + { + Ok(bounds) => { + if let Some(bounds) = bounds { + let point1 = Point2D::new(-bounds.width / 2., -bounds.height / 2.); + let point2 = Point2D::new(-bounds.width / 2., bounds.height / 2.); + let point3 = Point2D::new(bounds.width / 2., bounds.height / 2.); + let point4 = Point2D::new(bounds.width / 2., -bounds.height / 2.); + Some(vec![point1, point2, point3, point4]) + } else { + None + } + } + Err(_) => None, + } + } +} + +fn transform(pose: &Posef) -> RigidTransform3D { + let rotation = Rotation3D::quaternion( + pose.orientation.x, + pose.orientation.y, + pose.orientation.z, + pose.orientation.w, + ); + let translation = Vector3D::new(pose.position.x, pose.position.y, pose.position.z); + RigidTransform3D::new(rotation, translation) +} + +#[inline] +fn fov_to_projection_matrix(fov: &Fovf, clip_planes: ClipPlanes) -> Transform3D { + util::fov_to_projection_matrix( + fov.angle_left, + fov.angle_right, + fov.angle_up, + fov.angle_down, + clip_planes, + ) +} diff --git a/components/webxr/surfman_layer_manager.rs b/components/webxr/surfman_layer_manager.rs new file mode 100644 index 00000000000..33ef961b4a8 --- /dev/null +++ b/components/webxr/surfman_layer_manager.rs @@ -0,0 +1,234 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! 
An implementation of layer management using surfman + +use crate::gl_utils::GlClearer; +use euclid::{Point2D, Rect, Size2D}; +use glow::{self as gl, Context as Gl, HasContext, PixelUnpackData}; +use std::collections::HashMap; +use std::num::NonZeroU32; +use surfman::chains::{PreserveBuffer, SwapChains, SwapChainsAPI}; +use surfman::{Context as SurfmanContext, Device as SurfmanDevice, SurfaceAccess, SurfaceTexture}; +use webxr_api::{ + ContextId, Error, GLContexts, GLTypes, LayerId, LayerInit, LayerManagerAPI, SubImage, + SubImages, Viewports, +}; + +#[derive(Copy, Clone, Debug)] +pub enum SurfmanGL {} + +impl GLTypes for SurfmanGL { + type Device = SurfmanDevice; + type Context = SurfmanContext; + type Bindings = Gl; +} + +pub struct SurfmanLayerManager { + layers: Vec<(ContextId, LayerId)>, + swap_chains: SwapChains, + surface_textures: HashMap, + depth_stencil_textures: HashMap>, + viewports: Viewports, + clearer: GlClearer, +} + +impl SurfmanLayerManager { + pub fn new( + viewports: Viewports, + swap_chains: SwapChains, + ) -> SurfmanLayerManager { + let layers = Vec::new(); + let surface_textures = HashMap::new(); + let depth_stencil_textures = HashMap::new(); + let clearer = GlClearer::new(false); + SurfmanLayerManager { + layers, + swap_chains, + surface_textures, + depth_stencil_textures, + viewports, + clearer, + } + } +} + +impl LayerManagerAPI for SurfmanLayerManager { + fn create_layer( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + init: LayerInit, + ) -> Result { + let texture_size = init.texture_size(&self.viewports); + let layer_id = LayerId::new(); + let access = SurfaceAccess::GPUOnly; + let size = texture_size.to_untyped(); + // TODO: Treat depth and stencil separately? + let has_depth_stencil = match init { + LayerInit::WebGLLayer { stencil, depth, .. } => stencil | depth, + LayerInit::ProjectionLayer { stencil, depth, .. 
} => stencil | depth, + }; + if has_depth_stencil { + let gl = contexts + .bindings(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + let depth_stencil_texture = unsafe { gl.create_texture().ok() }; + unsafe { + gl.bind_texture(gl::TEXTURE_2D, depth_stencil_texture); + gl.tex_image_2d( + gl::TEXTURE_2D, + 0, + gl::DEPTH24_STENCIL8 as _, + size.width, + size.height, + 0, + gl::DEPTH_STENCIL, + gl::UNSIGNED_INT_24_8, + PixelUnpackData::Slice(None), + ); + } + self.depth_stencil_textures + .insert(layer_id, depth_stencil_texture); + } + let context = contexts + .context(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + self.swap_chains + .create_detached_swap_chain(layer_id, size, device, context, access) + .map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?; + self.layers.push((context_id, layer_id)); + Ok(layer_id) + } + + fn destroy_layer( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + context_id: ContextId, + layer_id: LayerId, + ) { + self.clearer + .destroy_layer(device, contexts, context_id, layer_id); + let context = match contexts.context(device, context_id) { + Some(context) => context, + None => return, + }; + self.layers.retain(|&ids| ids != (context_id, layer_id)); + let _ = self.swap_chains.destroy(layer_id, device, context); + self.surface_textures.remove(&layer_id); + if let Some(depth_stencil_texture) = self.depth_stencil_textures.remove(&layer_id) { + let gl = contexts.bindings(device, context_id).unwrap(); + if let Some(depth_stencil_texture) = depth_stencil_texture { + unsafe { + gl.delete_texture(depth_stencil_texture); + } + } + } + } + + fn layers(&self) -> &[(ContextId, LayerId)] { + &self.layers[..] + } + + fn begin_frame( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result, Error> { + layers + .iter() + .map(|&(context_id, layer_id)| { + let context = contexts + .context(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + let swap_chain = self + .swap_chains + .get(layer_id) + .ok_or(Error::NoMatchingDevice)?; + let surface_size = Size2D::from_untyped(swap_chain.size()); + let surface_texture = swap_chain + .take_surface_texture(device, context) + .map_err(|_| Error::NoMatchingDevice)?; + let color_texture = device.surface_texture_object(&surface_texture); + let color_target = device.surface_gl_texture_target(); + let depth_stencil_texture = self + .depth_stencil_textures + .get(&layer_id) + .cloned() + .flatten(); + let texture_array_index = None; + let origin = Point2D::new(0, 0); + let sub_image = Some(SubImage { + color_texture, + depth_stencil_texture: depth_stencil_texture.map(|nt| nt.0.get()), + texture_array_index, + viewport: Rect::new(origin, surface_size), + }); + let view_sub_images = self + .viewports + .viewports + .iter() + .map(|&viewport| SubImage { + color_texture, + depth_stencil_texture: depth_stencil_texture.map(|texture| texture.0.get()), + texture_array_index, + viewport, + }) + .collect(); + self.surface_textures.insert(layer_id, surface_texture); + self.clearer.clear( + device, + contexts, + context_id, + layer_id, + NonZeroU32::new(color_texture).map(gl::NativeTexture), + color_target, + depth_stencil_texture, + ); + Ok(SubImages { + layer_id, + sub_image, + view_sub_images, + }) + }) + .collect() + } + + fn end_frame( + &mut self, + device: &mut SurfmanDevice, + contexts: &mut dyn GLContexts, + layers: &[(ContextId, LayerId)], + ) -> Result<(), Error> { + for &(context_id, layer_id) in layers { + let gl = 
contexts + .bindings(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + unsafe { + gl.flush(); + } + let context = contexts + .context(device, context_id) + .ok_or(Error::NoMatchingDevice)?; + let surface_texture = self + .surface_textures + .remove(&layer_id) + .ok_or(Error::NoMatchingDevice)?; + let swap_chain = self + .swap_chains + .get(layer_id) + .ok_or(Error::NoMatchingDevice)?; + swap_chain + .recycle_surface_texture(device, context, surface_texture) + .map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?; + swap_chain + .swap_buffers(device, context, PreserveBuffer::No) + .map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?; + } + Ok(()) + } +} diff --git a/servo-tidy.toml b/servo-tidy.toml index 76c6c016462..355350ba0be 100644 --- a/servo-tidy.toml +++ b/servo-tidy.toml @@ -29,6 +29,9 @@ files = [ ] # Directories that are ignored for the non-WPT tidy check. directories = [ + # Ignored until these files are fully integrated into the workspace build. + "./components/webxr", + "./components/shared/webxr", # Test have expectations in them, causing tidy to fail. "./support/crown/tests", # Upstream