Merge webxr repository (#35228)

Signed-off-by: Martin Robinson <mrobinson@igalia.com>
Martin Robinson 2025-01-30 20:07:35 +01:00 committed by GitHub
parent 64b40ea700
commit 534e78db53
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 7303 additions and 2 deletions

Cargo.lock (generated)
View file

@@ -8488,7 +8488,6 @@ dependencies = [
[[package]]
name = "webxr"
version = "0.0.1"
source = "git+https://github.com/servo/webxr#4fd38cf6dd29ac58bafd0be2ef5337827853dcdd"
dependencies = [
"crossbeam-channel",
"euclid",
@@ -8506,7 +8505,6 @@ dependencies = [
[[package]]
name = "webxr-api"
version = "0.0.1"
source = "git+https://github.com/servo/webxr#4fd38cf6dd29ac58bafd0be2ef5337827853dcdd"
dependencies = [
"euclid",
"ipc-channel",

View file

@@ -230,3 +230,7 @@ codegen-units = 1
#
# [patch."https://github.com/servo/<repository>"]
# <crate> = { path = "/path/to/local/checkout" }
[patch."https://github.com/servo/webxr"]
webxr = { path = "components/webxr" }
webxr-api = { path = "components/shared/webxr" }

View file

@@ -0,0 +1,28 @@
[package]
name = "webxr-api"
version = "0.0.1"
authors = ["The Servo Project Developers"]
edition = "2018"
homepage = "https://github.com/servo/webxr"
repository = "https://github.com/servo/webxr"
keywords = ["ar", "headset", "openxr", "vr", "webxr"]
license = "MPL-2.0"
description = '''A safe Rust API that provides a way to interact with
virtual reality and augmented reality devices and integration with OpenXR.
The API is inspired by the WebXR Device API (https://www.w3.org/TR/webxr/)
but adapted to Rust design patterns.'''
[lib]
path = "lib.rs"
[features]
ipc = ["serde", "ipc-channel", "euclid/serde"]
[dependencies]
euclid = "0.22"
ipc-channel = { version = "0.19", optional = true }
log = "0.4"
serde = { version = "1.0", optional = true }
time = { version = "0.1", optional = true }

View file

@@ -0,0 +1,114 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Traits to be implemented by backends
use crate::ContextId;
use crate::EnvironmentBlendMode;
use crate::Error;
use crate::Event;
use crate::Floor;
use crate::Frame;
use crate::HitTestId;
use crate::HitTestSource;
use crate::InputSource;
use crate::LayerId;
use crate::LayerInit;
use crate::Native;
use crate::Quitter;
use crate::Sender;
use crate::Session;
use crate::SessionBuilder;
use crate::SessionInit;
use crate::SessionMode;
use crate::Viewports;
use euclid::{Point2D, RigidTransform3D};
/// A trait for discovering XR devices
pub trait DiscoveryAPI<GL>: 'static {
fn request_session(
&mut self,
mode: SessionMode,
init: &SessionInit,
xr: SessionBuilder<GL>,
) -> Result<Session, Error>;
fn supports_session(&self, mode: SessionMode) -> bool;
}
/// A trait for using an XR device
pub trait DeviceAPI: 'static {
/// Create a new layer
fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result<LayerId, Error>;
/// Destroy a layer
fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId);
/// The transform from native coordinates to the floor.
fn floor_transform(&self) -> Option<RigidTransform3D<f32, Native, Floor>>;
fn viewports(&self) -> Viewports;
/// Begin an animation frame.
fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option<Frame>;
/// End an animation frame, render the layer to the device, and block waiting for the next frame.
fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]);
/// Inputs registered with the device on initialization. More may be added, which
/// should be communicated through a yet-undecided event mechanism
fn initial_inputs(&self) -> Vec<InputSource>;
/// Sets the event handling channel
fn set_event_dest(&mut self, dest: Sender<Event>);
/// Quit the session
fn quit(&mut self);
fn set_quitter(&mut self, quitter: Quitter);
fn update_clip_planes(&mut self, near: f32, far: f32);
fn environment_blend_mode(&self) -> EnvironmentBlendMode {
// for VR devices, override for AR
EnvironmentBlendMode::Opaque
}
fn granted_features(&self) -> &[String];
fn request_hit_test(&mut self, _source: HitTestSource) {
panic!("This device does not support requesting hit tests");
}
fn cancel_hit_test(&mut self, _id: HitTestId) {
panic!("This device does not support hit tests");
}
fn update_frame_rate(&mut self, rate: f32) -> f32 {
rate
}
fn supported_frame_rates(&self) -> Vec<f32> {
Vec::new()
}
fn reference_space_bounds(&self) -> Option<Vec<Point2D<f32, Floor>>> {
None
}
}
impl<GL: 'static> DiscoveryAPI<GL> for Box<dyn DiscoveryAPI<GL>> {
fn request_session(
&mut self,
mode: SessionMode,
init: &SessionInit,
xr: SessionBuilder<GL>,
) -> Result<Session, Error> {
(&mut **self).request_session(mode, init, xr)
}
fn supports_session(&self, mode: SessionMode) -> bool {
(&**self).supports_session(mode)
}
}
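
As a usage illustration (not part of this commit), here is a minimal sketch of a DiscoveryAPI backend. The NoopDiscovery name is hypothetical; a real backend would hand a device factory to SessionBuilder::spawn or run_on_main_thread instead of returning an error.

use webxr_api::{DiscoveryAPI, Error, Session, SessionBuilder, SessionInit, SessionMode};

struct NoopDiscovery;

impl<GL> DiscoveryAPI<GL> for NoopDiscovery {
    fn request_session(
        &mut self,
        mode: SessionMode,
        _init: &SessionInit,
        _xr: SessionBuilder<GL>,
    ) -> Result<Session, Error> {
        if self.supports_session(mode) {
            // A real backend would call `_xr.spawn(...)` or
            // `_xr.run_on_main_thread(...)` with its device factory here.
            Err(Error::BackendSpecific("sketch only".into()))
        } else {
            Err(Error::NoMatchingDevice)
        }
    }

    fn supports_session(&self, mode: SessionMode) -> bool {
        // This hypothetical backend only advertises inline sessions.
        mode == SessionMode::Inline
    }
}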

View file

@@ -0,0 +1,21 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
/// Errors that can be produced by XR.
// TODO: this is currently incomplete!
#[derive(Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Error {
NoMatchingDevice,
CommunicationError,
ThreadCreationError,
InlineSession,
UnsupportedFeature(String),
BackendSpecific(String),
}

View file

@@ -0,0 +1,80 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use euclid::RigidTransform3D;
use crate::ApiSpace;
use crate::BaseSpace;
use crate::Frame;
use crate::InputFrame;
use crate::InputId;
use crate::InputSource;
use crate::SelectEvent;
use crate::SelectKind;
use crate::Sender;
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum Event {
/// Input source connected
AddInput(InputSource),
/// Input source disconnected
RemoveInput(InputId),
/// Input updated (this is a disconnect+reconnect)
UpdateInput(InputId, InputSource),
/// Session ended by device
SessionEnd,
/// Session focused/blurred/etc
VisibilityChange(Visibility),
/// Selection started / ended
Select(InputId, SelectKind, SelectEvent, Frame),
/// Input from an input source has changed
InputChanged(InputId, InputFrame),
/// Reference space has changed
ReferenceSpaceChanged(BaseSpace, RigidTransform3D<f32, ApiSpace, ApiSpace>),
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum Visibility {
/// Session fully displayed to user
Visible,
/// Session still visible, but is not the primary focus
VisibleBlurred,
/// Session not visible
Hidden,
}
/// Convenience structure for buffering up events
/// when no event callback has been set
pub enum EventBuffer {
Buffered(Vec<Event>),
Sink(Sender<Event>),
}
impl Default for EventBuffer {
fn default() -> Self {
EventBuffer::Buffered(vec![])
}
}
impl EventBuffer {
pub fn callback(&mut self, event: Event) {
match *self {
EventBuffer::Buffered(ref mut events) => events.push(event),
EventBuffer::Sink(ref dest) => {
let _ = dest.send(event);
}
}
}
pub fn upgrade(&mut self, dest: Sender<Event>) {
if let EventBuffer::Buffered(ref mut events) = *self {
for event in events.drain(..) {
let _ = dest.send(event);
}
}
*self = EventBuffer::Sink(dest)
}
}
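
A hypothetical usage sketch (not part of this commit) of EventBuffer: events raised before any destination is set are buffered, then flushed when upgrade installs a sender.

use webxr_api::{channel, Event, EventBuffer};

fn buffer_then_flush() {
    let mut events = EventBuffer::default();   // starts out buffering
    events.callback(Event::SessionEnd);        // no destination yet, so stored
    let (sender, receiver) = channel().expect("failed to create channel");
    events.upgrade(sender);                    // backlog is drained into the sender
    assert!(matches!(receiver.recv(), Ok(Event::SessionEnd)));
    events.callback(Event::SessionEnd);        // later events are forwarded directly
}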

View file

@@ -0,0 +1,60 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::Floor;
use crate::HitTestId;
use crate::HitTestResult;
use crate::InputFrame;
use crate::Native;
use crate::SubImages;
use crate::Viewer;
use crate::Viewports;
use crate::Views;
use euclid::RigidTransform3D;
/// The per-frame data that is provided by the device.
/// https://www.w3.org/TR/webxr/#xrframe
// TODO: other fields?
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct Frame {
/// The pose information of the viewer
pub pose: Option<ViewerPose>,
/// Frame information for each connected input source
pub inputs: Vec<InputFrame>,
/// Events that occur with the frame.
pub events: Vec<FrameUpdateEvent>,
/// The subimages to render to
pub sub_images: Vec<SubImages>,
/// The hit test results for this frame, if any
pub hit_test_results: Vec<HitTestResult>,
/// The average point in time this XRFrame is expected to be displayed on the device's display
pub predicted_display_time: f64,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum FrameUpdateEvent {
UpdateFloorTransform(Option<RigidTransform3D<f32, Native, Floor>>),
UpdateViewports(Viewports),
HitTestSourceAdded(HitTestId),
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct ViewerPose {
/// The transform from the viewer to native coordinates
///
/// This is equivalent to the pose of the viewer in native coordinates.
/// This is the inverse of the view matrix.
pub transform: RigidTransform3D<f32, Viewer, Native>,
// The various views
pub views: Views,
}
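
A small sketch (not part of this commit) of the relationship called out above: since the viewer pose is the inverse of the view matrix, a renderer can recover a view transform from it. This assumes euclid 0.22's RigidTransform3D::inverse and to_transform methods.

use euclid::Transform3D;
use webxr_api::{Native, Viewer, ViewerPose};

// Maps native-space geometry into viewer (eye) space for rendering.
fn view_matrix(pose: &ViewerPose) -> Transform3D<f32, Native, Viewer> {
    pose.transform.inverse().to_transform()
}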

View file

@@ -0,0 +1,122 @@
use crate::Native;
use euclid::RigidTransform3D;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct HandSpace;
#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct Hand<J> {
pub wrist: Option<J>,
pub thumb_metacarpal: Option<J>,
pub thumb_phalanx_proximal: Option<J>,
pub thumb_phalanx_distal: Option<J>,
pub thumb_phalanx_tip: Option<J>,
pub index: Finger<J>,
pub middle: Finger<J>,
pub ring: Finger<J>,
pub little: Finger<J>,
}
#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct Finger<J> {
pub metacarpal: Option<J>,
pub phalanx_proximal: Option<J>,
pub phalanx_intermediate: Option<J>,
pub phalanx_distal: Option<J>,
pub phalanx_tip: Option<J>,
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct JointFrame {
pub pose: RigidTransform3D<f32, HandSpace, Native>,
pub radius: f32,
}
impl Default for JointFrame {
fn default() -> Self {
Self {
pose: RigidTransform3D::identity(),
radius: 0.,
}
}
}
impl<J> Hand<J> {
pub fn map<R>(&self, map: impl (Fn(&Option<J>, Joint) -> Option<R>) + Copy) -> Hand<R> {
Hand {
wrist: map(&self.wrist, Joint::Wrist),
thumb_metacarpal: map(&self.thumb_metacarpal, Joint::ThumbMetacarpal),
thumb_phalanx_proximal: map(&self.thumb_phalanx_proximal, Joint::ThumbPhalanxProximal),
thumb_phalanx_distal: map(&self.thumb_phalanx_distal, Joint::ThumbPhalanxDistal),
thumb_phalanx_tip: map(&self.thumb_phalanx_tip, Joint::ThumbPhalanxTip),
index: self.index.map(|f, j| map(f, Joint::Index(j))),
middle: self.middle.map(|f, j| map(f, Joint::Middle(j))),
ring: self.ring.map(|f, j| map(f, Joint::Ring(j))),
little: self.little.map(|f, j| map(f, Joint::Little(j))),
}
}
pub fn get(&self, joint: Joint) -> Option<&J> {
match joint {
Joint::Wrist => self.wrist.as_ref(),
Joint::ThumbMetacarpal => self.thumb_metacarpal.as_ref(),
Joint::ThumbPhalanxProximal => self.thumb_phalanx_proximal.as_ref(),
Joint::ThumbPhalanxDistal => self.thumb_phalanx_distal.as_ref(),
Joint::ThumbPhalanxTip => self.thumb_phalanx_tip.as_ref(),
Joint::Index(f) => self.index.get(f),
Joint::Middle(f) => self.middle.get(f),
Joint::Ring(f) => self.ring.get(f),
Joint::Little(f) => self.little.get(f),
}
}
}
impl<J> Finger<J> {
pub fn map<R>(&self, map: impl (Fn(&Option<J>, FingerJoint) -> Option<R>) + Copy) -> Finger<R> {
Finger {
metacarpal: map(&self.metacarpal, FingerJoint::Metacarpal),
phalanx_proximal: map(&self.phalanx_proximal, FingerJoint::PhalanxProximal),
phalanx_intermediate: map(&self.phalanx_intermediate, FingerJoint::PhalanxIntermediate),
phalanx_distal: map(&self.phalanx_distal, FingerJoint::PhalanxDistal),
phalanx_tip: map(&self.phalanx_tip, FingerJoint::PhalanxTip),
}
}
pub fn get(&self, joint: FingerJoint) -> Option<&J> {
match joint {
FingerJoint::Metacarpal => self.metacarpal.as_ref(),
FingerJoint::PhalanxProximal => self.phalanx_proximal.as_ref(),
FingerJoint::PhalanxIntermediate => self.phalanx_intermediate.as_ref(),
FingerJoint::PhalanxDistal => self.phalanx_distal.as_ref(),
FingerJoint::PhalanxTip => self.phalanx_tip.as_ref(),
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum FingerJoint {
Metacarpal,
PhalanxProximal,
PhalanxIntermediate,
PhalanxDistal,
PhalanxTip,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum Joint {
Wrist,
ThumbMetacarpal,
ThumbPhalanxProximal,
ThumbPhalanxDistal,
ThumbPhalanxTip,
Index(FingerJoint),
Middle(FingerJoint),
Ring(FingerJoint),
Little(FingerJoint),
}
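
A sketch (not part of this commit) of how Hand::map is typically used: turning the joint-support description Hand<()> from an InputSource into per-frame data, populating only the joints the device reports.

use webxr_api::{Hand, JointFrame};

fn default_joint_frames(support: &Hand<()>) -> Hand<JointFrame> {
    // Supported joints (Some(())) get a default JointFrame; unsupported joints stay None.
    support.map(|joint, _which| joint.as_ref().map(|_| JointFrame::default()))
}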

View file

@@ -0,0 +1,179 @@
use crate::ApiSpace;
use crate::Native;
use crate::Space;
use euclid::Point3D;
use euclid::RigidTransform3D;
use euclid::Rotation3D;
use euclid::Vector3D;
use std::f32::EPSILON;
use std::iter::FromIterator;
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// https://immersive-web.github.io/hit-test/#xrray
pub struct Ray<Space> {
/// The origin of the ray
pub origin: Vector3D<f32, Space>,
/// The direction of the ray. Must be normalized.
pub direction: Vector3D<f32, Space>,
}
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// https://immersive-web.github.io/hit-test/#enumdef-xrhittesttrackabletype
pub enum EntityType {
Point,
Plane,
Mesh,
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// https://immersive-web.github.io/hit-test/#dictdef-xrhittestoptionsinit
pub struct HitTestSource {
pub id: HitTestId,
pub space: Space,
pub ray: Ray<ApiSpace>,
pub types: EntityTypes,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct HitTestId(pub u32);
#[derive(Copy, Clone, Debug, Default)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// Vec<EntityType>, but better
pub struct EntityTypes {
pub point: bool,
pub plane: bool,
pub mesh: bool,
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct HitTestResult {
pub id: HitTestId,
pub space: RigidTransform3D<f32, HitTestSpace, Native>,
}
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// The coordinate space of a hit test result
pub struct HitTestSpace;
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct Triangle {
pub first: Point3D<f32, Native>,
pub second: Point3D<f32, Native>,
pub third: Point3D<f32, Native>,
}
impl EntityTypes {
pub fn is_type(self, ty: EntityType) -> bool {
match ty {
EntityType::Point => self.point,
EntityType::Plane => self.plane,
EntityType::Mesh => self.mesh,
}
}
pub fn add_type(&mut self, ty: EntityType) {
match ty {
EntityType::Point => self.point = true,
EntityType::Plane => self.plane = true,
EntityType::Mesh => self.mesh = true,
}
}
}
impl FromIterator<EntityType> for EntityTypes {
fn from_iter<T>(iter: T) -> Self
where
T: IntoIterator<Item = EntityType>,
{
iter.into_iter().fold(Default::default(), |mut acc, e| {
acc.add_type(e);
acc
})
}
}
impl Triangle {
/// https://en.wikipedia.org/wiki/M%C3%B6ller%E2%80%93Trumbore_intersection_algorithm
pub fn intersect(
self,
ray: Ray<Native>,
) -> Option<RigidTransform3D<f32, HitTestSpace, Native>> {
let Triangle {
first: v0,
second: v1,
third: v2,
} = self;
let edge1 = v1 - v0;
let edge2 = v2 - v0;
let h = ray.direction.cross(edge2);
let a = edge1.dot(h);
if a > -EPSILON && a < EPSILON {
// ray is parallel to triangle
return None;
}
let f = 1. / a;
let s = ray.origin - v0.to_vector();
// barycentric coordinate of intersection point u
let u = f * s.dot(h);
// barycentric coordinates have range (0, 1)
if u < 0. || u > 1. {
// the intersection is outside the triangle
return None;
}
let q = s.cross(edge1);
// barycentric coordinate of intersection point v
let v = f * ray.direction.dot(q);
// barycentric coordinates have range (0, 1)
// and their sum must not be greater than 1
if v < 0. || u + v > 1. {
// the intersection is outside the triangle
return None;
}
let t = f * edge2.dot(q);
if t > EPSILON {
let origin = ray.origin + ray.direction * t;
// This is not part of the Möller-Trumbore algorithm; the hit test spec
// requires the result to have an orientation such that the Y axis points along
// the triangle normal
let normal = edge1.cross(edge2).normalize();
let y = Vector3D::new(0., 1., 0.);
let dot = normal.dot(y);
let rotation = if dot > -EPSILON && dot < EPSILON {
// vectors are parallel, return the vector itself
// XXXManishearth it's possible for the vectors to be
// antiparallel, unclear if normals need to be flipped
Rotation3D::identity()
} else {
let axis = normal.cross(y);
let cos = normal.dot(y);
// This is Rotation3D::around_axis(axis.normalize(), theta), however
// that is just Rotation3D::quaternion(axis.normalize().xyz * sin, cos),
// which is Rotation3D::quaternion(cross, dot)
Rotation3D::quaternion(axis.x, axis.y, axis.z, cos)
};
return Some(RigidTransform3D::new(rotation, origin));
}
// triangle is behind ray
None
}
}
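
A sketch (not part of this commit) exercising Triangle::intersect: a downward ray through a triangle lying in the y == 0 plane yields a transform whose origin is the hit point and whose rotation orients +Y along the triangle normal.

use euclid::{Point3D, Vector3D};
use webxr_api::{Native, Ray, Triangle};

fn example_hit() {
    let triangle = Triangle {
        first: Point3D::new(0.0, 0.0, 0.0),
        second: Point3D::new(1.0, 0.0, 0.0),
        third: Point3D::new(0.0, 0.0, 1.0),
    };
    let ray: Ray<Native> = Ray {
        origin: Vector3D::new(0.25, 1.0, 0.25),
        direction: Vector3D::new(0.0, -1.0, 0.0), // pointing straight down
    };
    let hit = triangle.intersect(ray).expect("ray passes through the triangle");
    assert!(hit.translation.y.abs() < 1e-6); // the hit point lies in the y == 0 plane
}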

View file

@@ -0,0 +1,74 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::Hand;
use crate::Input;
use crate::JointFrame;
use crate::Native;
use euclid::RigidTransform3D;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct InputId(pub u32);
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum Handedness {
None,
Left,
Right,
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum TargetRayMode {
Gaze,
TrackedPointer,
Screen,
TransientPointer,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct InputSource {
pub handedness: Handedness,
pub target_ray_mode: TargetRayMode,
pub id: InputId,
pub supports_grip: bool,
pub hand_support: Option<Hand<()>>,
pub profiles: Vec<String>,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct InputFrame {
pub id: InputId,
pub target_ray_origin: Option<RigidTransform3D<f32, Input, Native>>,
pub grip_origin: Option<RigidTransform3D<f32, Input, Native>>,
pub pressed: bool,
pub hand: Option<Box<Hand<JointFrame>>>,
pub squeezed: bool,
pub button_values: Vec<f32>,
pub axis_values: Vec<f32>,
pub input_changed: bool,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum SelectEvent {
/// Selection started
Start,
/// Selection ended *without* it being a contiguous select event
End,
/// Selection ended *with* it being a contiguous select event
Select,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum SelectKind {
Select,
Squeeze,
}

View file

@@ -0,0 +1,296 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::Error;
use crate::Viewport;
use crate::Viewports;
use euclid::Rect;
use euclid::Size2D;
use std::fmt::Debug;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub struct ContextId(pub u64);
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
pub trait GLTypes {
type Device;
type Context;
type Bindings;
}
pub trait GLContexts<GL: GLTypes> {
fn bindings(&mut self, device: &GL::Device, context_id: ContextId) -> Option<&GL::Bindings>;
fn context(&mut self, device: &GL::Device, context_id: ContextId) -> Option<&mut GL::Context>;
}
impl GLTypes for () {
type Bindings = ();
type Device = ();
type Context = ();
}
impl GLContexts<()> for () {
fn context(&mut self, _: &(), _: ContextId) -> Option<&mut ()> {
Some(self)
}
fn bindings(&mut self, _: &(), _: ContextId) -> Option<&()> {
Some(self)
}
}
pub trait LayerGrandManagerAPI<GL: GLTypes> {
fn create_layer_manager(&self, factory: LayerManagerFactory<GL>)
-> Result<LayerManager, Error>;
fn clone_layer_grand_manager(&self) -> LayerGrandManager<GL>;
}
pub struct LayerGrandManager<GL>(Box<dyn Send + LayerGrandManagerAPI<GL>>);
impl<GL: GLTypes> Clone for LayerGrandManager<GL> {
fn clone(&self) -> Self {
self.0.clone_layer_grand_manager()
}
}
impl<GL> Debug for LayerGrandManager<GL> {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
"LayerGrandManager(...)".fmt(fmt)
}
}
impl<GL: GLTypes> LayerGrandManager<GL> {
pub fn new<GM>(grand_manager: GM) -> LayerGrandManager<GL>
where
GM: 'static + Send + LayerGrandManagerAPI<GL>,
{
LayerGrandManager(Box::new(grand_manager))
}
pub fn create_layer_manager<F, M>(&self, factory: F) -> Result<LayerManager, Error>
where
F: 'static + Send + FnOnce(&mut GL::Device, &mut dyn GLContexts<GL>) -> Result<M, Error>,
M: 'static + LayerManagerAPI<GL>,
{
self.0
.create_layer_manager(LayerManagerFactory::new(factory))
}
}
pub trait LayerManagerAPI<GL: GLTypes> {
fn create_layer(
&mut self,
device: &mut GL::Device,
contexts: &mut dyn GLContexts<GL>,
context_id: ContextId,
init: LayerInit,
) -> Result<LayerId, Error>;
fn destroy_layer(
&mut self,
device: &mut GL::Device,
contexts: &mut dyn GLContexts<GL>,
context_id: ContextId,
layer_id: LayerId,
);
fn layers(&self) -> &[(ContextId, LayerId)];
fn begin_frame(
&mut self,
device: &mut GL::Device,
contexts: &mut dyn GLContexts<GL>,
layers: &[(ContextId, LayerId)],
) -> Result<Vec<SubImages>, Error>;
fn end_frame(
&mut self,
device: &mut GL::Device,
contexts: &mut dyn GLContexts<GL>,
layers: &[(ContextId, LayerId)],
) -> Result<(), Error>;
}
pub struct LayerManager(Box<dyn Send + LayerManagerAPI<()>>);
impl Debug for LayerManager {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
"LayerManager(...)".fmt(fmt)
}
}
impl LayerManager {
pub fn create_layer(
&mut self,
context_id: ContextId,
init: LayerInit,
) -> Result<LayerId, Error> {
self.0.create_layer(&mut (), &mut (), context_id, init)
}
pub fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) {
self.0.destroy_layer(&mut (), &mut (), context_id, layer_id);
}
pub fn begin_frame(
&mut self,
layers: &[(ContextId, LayerId)],
) -> Result<Vec<SubImages>, Error> {
self.0.begin_frame(&mut (), &mut (), layers)
}
pub fn end_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Result<(), Error> {
self.0.end_frame(&mut (), &mut (), layers)
}
}
impl LayerManager {
pub fn new<M>(manager: M) -> LayerManager
where
M: 'static + Send + LayerManagerAPI<()>,
{
LayerManager(Box::new(manager))
}
}
impl Drop for LayerManager {
fn drop(&mut self) {
log::debug!("Dropping LayerManager");
for (context_id, layer_id) in self.0.layers().to_vec() {
self.destroy_layer(context_id, layer_id);
}
}
}
pub struct LayerManagerFactory<GL: GLTypes>(
Box<
dyn Send
+ FnOnce(
&mut GL::Device,
&mut dyn GLContexts<GL>,
) -> Result<Box<dyn LayerManagerAPI<GL>>, Error>,
>,
);
impl<GL: GLTypes> Debug for LayerManagerFactory<GL> {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
"LayerManagerFactory(...)".fmt(fmt)
}
}
impl<GL: GLTypes> LayerManagerFactory<GL> {
pub fn new<F, M>(factory: F) -> LayerManagerFactory<GL>
where
F: 'static + Send + FnOnce(&mut GL::Device, &mut dyn GLContexts<GL>) -> Result<M, Error>,
M: 'static + LayerManagerAPI<GL>,
{
LayerManagerFactory(Box::new(move |device, contexts| {
Ok(Box::new(factory(device, contexts)?))
}))
}
pub fn build(
self,
device: &mut GL::Device,
contexts: &mut dyn GLContexts<GL>,
) -> Result<Box<dyn LayerManagerAPI<GL>>, Error> {
(self.0)(device, contexts)
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub struct LayerId(usize);
static NEXT_LAYER_ID: AtomicUsize = AtomicUsize::new(0);
impl LayerId {
pub fn new() -> LayerId {
LayerId(NEXT_LAYER_ID.fetch_add(1, Ordering::SeqCst))
}
}
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub enum LayerInit {
// https://www.w3.org/TR/webxr/#dictdef-xrwebgllayerinit
WebGLLayer {
antialias: bool,
depth: bool,
stencil: bool,
alpha: bool,
ignore_depth_values: bool,
framebuffer_scale_factor: f32,
},
// https://immersive-web.github.io/layers/#xrprojectionlayerinittype
ProjectionLayer {
depth: bool,
stencil: bool,
alpha: bool,
scale_factor: f32,
},
// TODO: other layer types
}
impl LayerInit {
pub fn texture_size(&self, viewports: &Viewports) -> Size2D<i32, Viewport> {
match self {
LayerInit::WebGLLayer {
framebuffer_scale_factor: scale,
..
}
| LayerInit::ProjectionLayer {
scale_factor: scale,
..
} => {
let native_size = viewports
.viewports
.iter()
.fold(Rect::zero(), |acc, view| acc.union(view))
.size;
(native_size.to_f32() * *scale).to_i32()
}
}
}
}
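
A sketch (not part of this commit) of texture_size in action: the viewports are unioned and then scaled, so a 960x1080 per-eye side-by-side layout produces a 1920x1080 texture at a scale factor of 1.0. The concrete numbers here are illustrative only.

use euclid::{Point2D, Rect, Size2D};
use webxr_api::{LayerInit, Viewport, Viewports};

fn stereo_texture_size() -> Size2D<i32, Viewport> {
    let viewports = Viewports {
        viewports: vec![
            Rect::new(Point2D::new(0, 0), Size2D::new(960, 1080)),   // left eye
            Rect::new(Point2D::new(960, 0), Size2D::new(960, 1080)), // right eye
        ],
    };
    let init = LayerInit::ProjectionLayer {
        depth: true,
        stencil: false,
        alpha: true,
        scale_factor: 1.0,
    };
    init.texture_size(&viewports) // == Size2D::new(1920, 1080)
}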
/// https://immersive-web.github.io/layers/#enumdef-xrlayerlayout
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub enum LayerLayout {
// TODO: Default
// Allocates one texture
Mono,
// Allocates one texture, which is split in half vertically, giving two subimages
StereoLeftRight,
// Allocates one texture, which is split in half horizontally, giving two subimages
StereoTopBottom,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub struct SubImages {
pub layer_id: LayerId,
pub sub_image: Option<SubImage>,
pub view_sub_images: Vec<SubImage>,
}
/// https://immersive-web.github.io/layers/#xrsubimagetype
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub struct SubImage {
pub color_texture: u32,
// TODO: make this Option<NonZeroU32>
pub depth_stencil_texture: Option<u32>,
pub texture_array_index: Option<u32>,
pub viewport: Rect<i32, Viewport>,
}

View file

@@ -0,0 +1,175 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! This crate defines the Rust API for WebXR. It is implemented by the `webxr` crate.
mod device;
mod error;
mod events;
mod frame;
mod hand;
mod hittest;
mod input;
mod layer;
mod mock;
mod registry;
mod session;
mod space;
pub mod util;
mod view;
pub use device::DeviceAPI;
pub use device::DiscoveryAPI;
pub use error::Error;
pub use events::Event;
pub use events::EventBuffer;
pub use events::Visibility;
pub use frame::Frame;
pub use frame::FrameUpdateEvent;
pub use frame::ViewerPose;
pub use hand::Finger;
pub use hand::FingerJoint;
pub use hand::Hand;
pub use hand::HandSpace;
pub use hand::Joint;
pub use hand::JointFrame;
pub use hittest::EntityType;
pub use hittest::EntityTypes;
pub use hittest::HitTestId;
pub use hittest::HitTestResult;
pub use hittest::HitTestSource;
pub use hittest::HitTestSpace;
pub use hittest::Ray;
pub use hittest::Triangle;
pub use input::Handedness;
pub use input::InputFrame;
pub use input::InputId;
pub use input::InputSource;
pub use input::SelectEvent;
pub use input::SelectKind;
pub use input::TargetRayMode;
pub use layer::ContextId;
pub use layer::GLContexts;
pub use layer::GLTypes;
pub use layer::LayerGrandManager;
pub use layer::LayerGrandManagerAPI;
pub use layer::LayerId;
pub use layer::LayerInit;
pub use layer::LayerLayout;
pub use layer::LayerManager;
pub use layer::LayerManagerAPI;
pub use layer::LayerManagerFactory;
pub use layer::SubImage;
pub use layer::SubImages;
pub use mock::MockButton;
pub use mock::MockButtonType;
pub use mock::MockDeviceInit;
pub use mock::MockDeviceMsg;
pub use mock::MockDiscoveryAPI;
pub use mock::MockInputInit;
pub use mock::MockInputMsg;
pub use mock::MockRegion;
pub use mock::MockViewInit;
pub use mock::MockViewsInit;
pub use mock::MockWorld;
pub use registry::MainThreadRegistry;
pub use registry::MainThreadWaker;
pub use registry::Registry;
pub use session::EnvironmentBlendMode;
pub use session::MainThreadSession;
pub use session::Quitter;
pub use session::Session;
pub use session::SessionBuilder;
pub use session::SessionId;
pub use session::SessionInit;
pub use session::SessionMode;
pub use session::SessionThread;
pub use space::ApiSpace;
pub use space::BaseSpace;
pub use space::Space;
pub use view::Capture;
pub use view::CubeBack;
pub use view::CubeBottom;
pub use view::CubeLeft;
pub use view::CubeRight;
pub use view::CubeTop;
pub use view::Display;
pub use view::Floor;
pub use view::Input;
pub use view::LeftEye;
pub use view::Native;
pub use view::RightEye;
pub use view::SomeEye;
pub use view::View;
pub use view::Viewer;
pub use view::Viewport;
pub use view::Viewports;
pub use view::Views;
pub use view::CUBE_BACK;
pub use view::CUBE_BOTTOM;
pub use view::CUBE_LEFT;
pub use view::CUBE_RIGHT;
pub use view::CUBE_TOP;
pub use view::LEFT_EYE;
pub use view::RIGHT_EYE;
pub use view::VIEWER;
#[cfg(feature = "ipc")]
use std::thread;
use std::time::Duration;
#[cfg(feature = "ipc")]
pub use ipc_channel::ipc::IpcSender as Sender;
#[cfg(feature = "ipc")]
pub use ipc_channel::ipc::IpcReceiver as Receiver;
#[cfg(feature = "ipc")]
pub use ipc_channel::ipc::channel;
#[cfg(not(feature = "ipc"))]
pub use std::sync::mpsc::{Receiver, RecvTimeoutError, Sender};
#[cfg(not(feature = "ipc"))]
pub fn channel<T>() -> Result<(Sender<T>, Receiver<T>), ()> {
Ok(std::sync::mpsc::channel())
}
#[cfg(not(feature = "ipc"))]
pub fn recv_timeout<T>(receiver: &Receiver<T>, timeout: Duration) -> Result<T, RecvTimeoutError> {
receiver.recv_timeout(timeout)
}
#[cfg(feature = "ipc")]
pub fn recv_timeout<T>(
receiver: &Receiver<T>,
timeout: Duration,
) -> Result<T, ipc_channel::ipc::TryRecvError>
where
T: serde::Serialize + for<'a> serde::Deserialize<'a>,
{
// Sigh, polling, sigh.
let mut delay = timeout / 1000;
while delay < timeout {
if let Ok(msg) = receiver.try_recv() {
return Ok(msg);
}
thread::sleep(delay);
delay = delay * 2;
}
receiver.try_recv()
}
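
A sketch (not part of this commit) showing why the channel/Sender/Receiver/recv_timeout aliases exist: calling code can be written once and work whether or not the ipc feature is enabled.

use std::time::Duration;
use webxr_api::{channel, recv_timeout, Event};

fn wait_briefly_for_event() -> Option<Event> {
    let (sender, receiver) = channel().ok()?;
    let _ = sender.send(Event::SessionEnd);
    // With `ipc` this polls via try_recv; without it, it maps to mpsc's recv_timeout.
    recv_timeout(&receiver, Duration::from_millis(5)).ok()
}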

View file

@@ -0,0 +1,146 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::DiscoveryAPI;
use crate::Display;
use crate::EntityType;
use crate::Error;
use crate::Floor;
use crate::Handedness;
use crate::Input;
use crate::InputId;
use crate::InputSource;
use crate::LeftEye;
use crate::Native;
use crate::Receiver;
use crate::RightEye;
use crate::SelectEvent;
use crate::SelectKind;
use crate::Sender;
use crate::TargetRayMode;
use crate::Triangle;
use crate::Viewer;
use crate::Viewport;
use crate::Visibility;
use euclid::{Point2D, Rect, RigidTransform3D, Transform3D};
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
/// A trait for discovering mock XR devices
pub trait MockDiscoveryAPI<GL>: 'static {
fn simulate_device_connection(
&mut self,
init: MockDeviceInit,
receiver: Receiver<MockDeviceMsg>,
) -> Result<Box<dyn DiscoveryAPI<GL>>, Error>;
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockDeviceInit {
pub floor_origin: Option<RigidTransform3D<f32, Floor, Native>>,
pub supports_inline: bool,
pub supports_vr: bool,
pub supports_ar: bool,
pub viewer_origin: Option<RigidTransform3D<f32, Viewer, Native>>,
pub views: MockViewsInit,
pub supported_features: Vec<String>,
pub world: Option<MockWorld>,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockViewInit<Eye> {
pub transform: RigidTransform3D<f32, Viewer, Eye>,
pub projection: Transform3D<f32, Eye, Display>,
pub viewport: Rect<i32, Viewport>,
/// field of view values, in radians
pub fov: Option<(f32, f32, f32, f32)>,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum MockViewsInit {
Mono(MockViewInit<Viewer>),
Stereo(MockViewInit<LeftEye>, MockViewInit<RightEye>),
}
#[derive(Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum MockDeviceMsg {
SetViewerOrigin(Option<RigidTransform3D<f32, Viewer, Native>>),
SetFloorOrigin(Option<RigidTransform3D<f32, Floor, Native>>),
SetViews(MockViewsInit),
AddInputSource(MockInputInit),
MessageInputSource(InputId, MockInputMsg),
VisibilityChange(Visibility),
SetWorld(MockWorld),
ClearWorld,
Disconnect(Sender<()>),
SetBoundsGeometry(Vec<Point2D<f32, Floor>>),
SimulateResetPose,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockInputInit {
pub source: InputSource,
pub pointer_origin: Option<RigidTransform3D<f32, Input, Native>>,
pub grip_origin: Option<RigidTransform3D<f32, Input, Native>>,
pub supported_buttons: Vec<MockButton>,
}
#[derive(Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum MockInputMsg {
SetHandedness(Handedness),
SetTargetRayMode(TargetRayMode),
SetProfiles(Vec<String>),
SetPointerOrigin(Option<RigidTransform3D<f32, Input, Native>>),
SetGripOrigin(Option<RigidTransform3D<f32, Input, Native>>),
/// Note: SelectEvent::Select here refers to a complete Select event,
/// not just the end event, i.e. it refers to
/// https://immersive-web.github.io/webxr-test-api/#dom-fakexrinputcontroller-simulateselect
TriggerSelect(SelectKind, SelectEvent),
Disconnect,
Reconnect,
SetSupportedButtons(Vec<MockButton>),
UpdateButtonState(MockButton),
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockRegion {
pub faces: Vec<Triangle>,
pub ty: EntityType,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockWorld {
pub regions: Vec<MockRegion>,
}
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum MockButtonType {
Grip,
Touchpad,
Thumbstick,
OptionalButton,
OptionalThumbstick,
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct MockButton {
pub button_type: MockButtonType,
pub pressed: bool,
pub touched: bool,
pub pressed_value: f32,
pub x_value: f32,
pub y_value: f32,
}
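
A sketch (not part of this commit) of a minimal MockDeviceInit, the kind of description a WebXR Test API harness would feed to simulate_device_connection. The concrete values are placeholders.

use euclid::{Point2D, Rect, RigidTransform3D, Size2D, Transform3D};
use webxr_api::{MockDeviceInit, MockViewInit, MockViewsInit, Viewer};

fn test_device_init() -> MockDeviceInit {
    let view: MockViewInit<Viewer> = MockViewInit {
        transform: RigidTransform3D::identity(),
        projection: Transform3D::identity(), // placeholder projection
        viewport: Rect::new(Point2D::new(0, 0), Size2D::new(400, 400)),
        fov: None,
    };
    MockDeviceInit {
        floor_origin: Some(RigidTransform3D::identity()),
        supports_inline: false,
        supports_vr: true,
        supports_ar: false,
        viewer_origin: Some(RigidTransform3D::identity()),
        views: MockViewsInit::Mono(view),
        supported_features: vec!["local-floor".into()],
        world: None,
    }
}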

View file

@@ -0,0 +1,262 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::DiscoveryAPI;
use crate::Error;
use crate::Frame;
use crate::GLTypes;
use crate::LayerGrandManager;
use crate::MainThreadSession;
use crate::MockDeviceInit;
use crate::MockDeviceMsg;
use crate::MockDiscoveryAPI;
use crate::Receiver;
use crate::Sender;
use crate::Session;
use crate::SessionBuilder;
use crate::SessionId;
use crate::SessionInit;
use crate::SessionMode;
use log::warn;
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
#[derive(Clone)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct Registry {
sender: Sender<RegistryMsg>,
waker: MainThreadWakerImpl,
}
pub struct MainThreadRegistry<GL> {
discoveries: Vec<Box<dyn DiscoveryAPI<GL>>>,
sessions: Vec<Box<dyn MainThreadSession>>,
mocks: Vec<Box<dyn MockDiscoveryAPI<GL>>>,
sender: Sender<RegistryMsg>,
receiver: Receiver<RegistryMsg>,
waker: MainThreadWakerImpl,
grand_manager: LayerGrandManager<GL>,
next_session_id: u32,
}
pub trait MainThreadWaker: 'static + Send {
fn clone_box(&self) -> Box<dyn MainThreadWaker>;
fn wake(&self);
}
impl Clone for Box<dyn MainThreadWaker> {
fn clone(&self) -> Self {
self.clone_box()
}
}
#[derive(Clone)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
struct MainThreadWakerImpl {
#[cfg(feature = "ipc")]
sender: Sender<()>,
#[cfg(not(feature = "ipc"))]
waker: Box<dyn MainThreadWaker>,
}
#[cfg(feature = "ipc")]
impl MainThreadWakerImpl {
fn new(waker: Box<dyn MainThreadWaker>) -> Result<MainThreadWakerImpl, Error> {
let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?;
ipc_channel::router::ROUTER.add_typed_route(receiver, Box::new(move |_| waker.wake()));
Ok(MainThreadWakerImpl { sender })
}
fn wake(&self) {
let _ = self.sender.send(());
}
}
#[cfg(not(feature = "ipc"))]
impl MainThreadWakerImpl {
fn new(waker: Box<dyn MainThreadWaker>) -> Result<MainThreadWakerImpl, Error> {
Ok(MainThreadWakerImpl { waker })
}
pub fn wake(&self) {
self.waker.wake()
}
}
impl Registry {
pub fn supports_session(&mut self, mode: SessionMode, dest: Sender<Result<(), Error>>) {
let _ = self.sender.send(RegistryMsg::SupportsSession(mode, dest));
self.waker.wake();
}
pub fn request_session(
&mut self,
mode: SessionMode,
init: SessionInit,
dest: Sender<Result<Session, Error>>,
animation_frame_handler: Sender<Frame>,
) {
let _ = self.sender.send(RegistryMsg::RequestSession(
mode,
init,
dest,
animation_frame_handler,
));
self.waker.wake();
}
pub fn simulate_device_connection(
&mut self,
init: MockDeviceInit,
dest: Sender<Result<Sender<MockDeviceMsg>, Error>>,
) {
let _ = self
.sender
.send(RegistryMsg::SimulateDeviceConnection(init, dest));
self.waker.wake();
}
}
impl<GL: 'static + GLTypes> MainThreadRegistry<GL> {
pub fn new(
waker: Box<dyn MainThreadWaker>,
grand_manager: LayerGrandManager<GL>,
) -> Result<Self, Error> {
let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?;
let discoveries = Vec::new();
let sessions = Vec::new();
let mocks = Vec::new();
let waker = MainThreadWakerImpl::new(waker)?;
Ok(MainThreadRegistry {
discoveries,
sessions,
mocks,
sender,
receiver,
waker,
grand_manager,
next_session_id: 0,
})
}
pub fn registry(&self) -> Registry {
Registry {
sender: self.sender.clone(),
waker: self.waker.clone(),
}
}
pub fn register<D>(&mut self, discovery: D)
where
D: DiscoveryAPI<GL>,
{
self.discoveries.push(Box::new(discovery));
}
pub fn register_mock<D>(&mut self, discovery: D)
where
D: MockDiscoveryAPI<GL>,
{
self.mocks.push(Box::new(discovery));
}
pub fn run_on_main_thread<S>(&mut self, session: S)
where
S: MainThreadSession,
{
self.sessions.push(Box::new(session));
}
pub fn run_one_frame(&mut self) {
while let Ok(msg) = self.receiver.try_recv() {
self.handle_msg(msg);
}
for session in &mut self.sessions {
session.run_one_frame();
}
self.sessions.retain(|session| session.running());
}
pub fn running(&self) -> bool {
self.sessions.iter().any(|session| session.running())
}
fn handle_msg(&mut self, msg: RegistryMsg) {
match msg {
RegistryMsg::SupportsSession(mode, dest) => {
let _ = dest.send(self.supports_session(mode));
}
RegistryMsg::RequestSession(mode, init, dest, raf_sender) => {
let _ = dest.send(self.request_session(mode, init, raf_sender));
}
RegistryMsg::SimulateDeviceConnection(init, dest) => {
let _ = dest.send(self.simulate_device_connection(init));
}
}
}
fn supports_session(&mut self, mode: SessionMode) -> Result<(), Error> {
for discovery in &self.discoveries {
if discovery.supports_session(mode) {
return Ok(());
}
}
Err(Error::NoMatchingDevice)
}
fn request_session(
&mut self,
mode: SessionMode,
init: SessionInit,
raf_sender: Sender<Frame>,
) -> Result<Session, Error> {
for discovery in &mut self.discoveries {
if discovery.supports_session(mode) {
let raf_sender = raf_sender.clone();
let id = SessionId(self.next_session_id);
self.next_session_id += 1;
let xr = SessionBuilder::new(
&mut self.sessions,
raf_sender,
self.grand_manager.clone(),
id,
);
match discovery.request_session(mode, &init, xr) {
Ok(session) => return Ok(session),
Err(err) => warn!("XR device error {:?}", err),
}
}
}
warn!("no device could support the session");
Err(Error::NoMatchingDevice)
}
fn simulate_device_connection(
&mut self,
init: MockDeviceInit,
) -> Result<Sender<MockDeviceMsg>, Error> {
for mock in &mut self.mocks {
let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?;
if let Ok(discovery) = mock.simulate_device_connection(init.clone(), receiver) {
self.discoveries.insert(0, discovery);
return Ok(sender);
}
}
Err(Error::NoMatchingDevice)
}
}
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
enum RegistryMsg {
RequestSession(
SessionMode,
SessionInit,
Sender<Result<Session, Error>>,
Sender<Frame>,
),
SupportsSession(SessionMode, Sender<Result<(), Error>>),
SimulateDeviceConnection(MockDeviceInit, Sender<Result<Sender<MockDeviceMsg>, Error>>),
}
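
A sketch (not part of this commit) of how an embedder might own the registry on its main thread. NoopWaker is hypothetical, and the LayerGrandManager would come from the embedder's compositor integration.

use webxr_api::{GLTypes, LayerGrandManager, MainThreadRegistry, MainThreadWaker};

#[derive(Clone)]
struct NoopWaker;

impl MainThreadWaker for NoopWaker {
    fn clone_box(&self) -> Box<dyn MainThreadWaker> {
        Box::new(self.clone())
    }
    fn wake(&self) {
        // A real embedder would schedule a call to run_one_frame on its event loop here.
    }
}

fn create_registry<GL: 'static + GLTypes>(
    grand_manager: LayerGrandManager<GL>,
) -> MainThreadRegistry<GL> {
    let mut registry = MainThreadRegistry::new(Box::new(NoopWaker), grand_manager)
        .expect("failed to create registry");
    // Backends are registered here, e.g. registry.register(my_discovery);
    // content threads get a cloneable handle via registry.registry().
    registry.run_one_frame(); // pump once: handles pending messages and main-thread sessions
    registry
}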

View file

@@ -0,0 +1,531 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::channel;
use crate::ContextId;
use crate::DeviceAPI;
use crate::Error;
use crate::Event;
use crate::Floor;
use crate::Frame;
use crate::FrameUpdateEvent;
use crate::HitTestId;
use crate::HitTestSource;
use crate::InputSource;
use crate::LayerGrandManager;
use crate::LayerId;
use crate::LayerInit;
use crate::Native;
use crate::Receiver;
use crate::Sender;
use crate::Viewport;
use crate::Viewports;
use euclid::Point2D;
use euclid::Rect;
use euclid::RigidTransform3D;
use euclid::Size2D;
use log::warn;
use std::thread;
use std::time::Duration;
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
// How long to wait for an rAF.
static TIMEOUT: Duration = Duration::from_millis(5);
/// https://www.w3.org/TR/webxr/#xrsessionmode-enum
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum SessionMode {
Inline,
ImmersiveVR,
ImmersiveAR,
}
/// https://immersive-web.github.io/webxr/#dictdef-xrsessioninit
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct SessionInit {
pub required_features: Vec<String>,
pub optional_features: Vec<String>,
/// Secondary views are enabled with the `secondary-view` feature
/// but for performance reasons we also ask users to enable this pref
/// for now.
pub first_person_observer_view: bool,
}
impl SessionInit {
/// Helper function for validating a list of requested features against
/// a list of supported features for a given mode
pub fn validate(&self, mode: SessionMode, supported: &[String]) -> Result<Vec<String>, Error> {
for f in &self.required_features {
// viewer and local in immersive are granted by default
// https://immersive-web.github.io/webxr/#default-features
if f == "viewer" || (f == "local" && mode != SessionMode::Inline) {
continue;
}
if !supported.contains(f) {
return Err(Error::UnsupportedFeature(f.into()));
}
}
let mut granted = self.required_features.clone();
for f in &self.optional_features {
if f == "viewer"
|| (f == "local" && mode != SessionMode::Inline)
|| supported.contains(f)
{
granted.push(f.clone());
}
}
Ok(granted)
}
pub fn feature_requested(&self, f: &str) -> bool {
self.required_features
.iter()
.chain(self.optional_features.iter())
.find(|x| *x == f)
.is_some()
}
}
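
A sketch (not part of this commit) of the validation helper above: "viewer" (and "local" outside inline sessions) are granted implicitly, so only the optional "hand-tracking" feature has to appear in the device's supported list.

use webxr_api::{Error, SessionInit, SessionMode};

fn granted_features_example() -> Result<Vec<String>, Error> {
    let init = SessionInit {
        required_features: vec!["local".into()],
        optional_features: vec!["hand-tracking".into()],
        first_person_observer_view: false,
    };
    let supported = vec!["hand-tracking".to_string()];
    // Returns Ok(["local", "hand-tracking"]) for an immersive-vr request.
    init.validate(SessionMode::ImmersiveVR, &supported)
}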
/// https://immersive-web.github.io/webxr-ar-module/#xrenvironmentblendmode-enum
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum EnvironmentBlendMode {
Opaque,
AlphaBlend,
Additive,
}
// The messages that are sent from the content thread to the session thread.
#[derive(Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
enum SessionMsg {
CreateLayer(ContextId, LayerInit, Sender<Result<LayerId, Error>>),
DestroyLayer(ContextId, LayerId),
SetLayers(Vec<(ContextId, LayerId)>),
SetEventDest(Sender<Event>),
UpdateClipPlanes(/* near */ f32, /* far */ f32),
StartRenderLoop,
RenderAnimationFrame,
RequestHitTest(HitTestSource),
CancelHitTest(HitTestId),
UpdateFrameRate(f32, Sender<f32>),
Quit,
GetBoundsGeometry(Sender<Option<Vec<Point2D<f32, Floor>>>>),
}
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
#[derive(Clone)]
pub struct Quitter {
sender: Sender<SessionMsg>,
}
impl Quitter {
pub fn quit(&self) {
let _ = self.sender.send(SessionMsg::Quit);
}
}
/// An object that represents an XR session.
/// This is owned by the content thread.
/// https://www.w3.org/TR/webxr/#xrsession-interface
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct Session {
floor_transform: Option<RigidTransform3D<f32, Native, Floor>>,
viewports: Viewports,
sender: Sender<SessionMsg>,
environment_blend_mode: EnvironmentBlendMode,
initial_inputs: Vec<InputSource>,
granted_features: Vec<String>,
id: SessionId,
supported_frame_rates: Vec<f32>,
}
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "ipc", derive(Deserialize, Serialize))]
pub struct SessionId(pub(crate) u32);
impl Session {
pub fn id(&self) -> SessionId {
self.id
}
pub fn floor_transform(&self) -> Option<RigidTransform3D<f32, Native, Floor>> {
self.floor_transform.clone()
}
pub fn reference_space_bounds(&self) -> Option<Vec<Point2D<f32, Floor>>> {
let (sender, receiver) = channel().ok()?;
let _ = self.sender.send(SessionMsg::GetBoundsGeometry(sender));
receiver.recv().ok()?
}
pub fn initial_inputs(&self) -> &[InputSource] {
&self.initial_inputs
}
pub fn environment_blend_mode(&self) -> EnvironmentBlendMode {
self.environment_blend_mode
}
pub fn viewports(&self) -> &[Rect<i32, Viewport>] {
&self.viewports.viewports
}
/// A resolution large enough to contain all the viewports.
/// https://immersive-web.github.io/webxr/#recommended-webgl-framebuffer-resolution
///
/// Returns None if the session is inline
pub fn recommended_framebuffer_resolution(&self) -> Option<Size2D<i32, Viewport>> {
self.viewports()
.iter()
.fold(None::<Rect<_, _>>, |acc, vp| {
Some(acc.map(|a| a.union(vp)).unwrap_or(*vp))
})
.map(|rect| Size2D::new(rect.max_x(), rect.max_y()))
}
pub fn create_layer(&self, context_id: ContextId, init: LayerInit) -> Result<LayerId, Error> {
let (sender, receiver) = channel().map_err(|_| Error::CommunicationError)?;
let _ = self
.sender
.send(SessionMsg::CreateLayer(context_id, init, sender));
receiver.recv().map_err(|_| Error::CommunicationError)?
}
/// Destroy a layer
pub fn destroy_layer(&self, context_id: ContextId, layer_id: LayerId) {
let _ = self
.sender
.send(SessionMsg::DestroyLayer(context_id, layer_id));
}
pub fn set_layers(&self, layers: Vec<(ContextId, LayerId)>) {
let _ = self.sender.send(SessionMsg::SetLayers(layers));
}
pub fn start_render_loop(&mut self) {
let _ = self.sender.send(SessionMsg::StartRenderLoop);
}
pub fn update_clip_planes(&mut self, near: f32, far: f32) {
let _ = self.sender.send(SessionMsg::UpdateClipPlanes(near, far));
}
pub fn set_event_dest(&mut self, dest: Sender<Event>) {
let _ = self.sender.send(SessionMsg::SetEventDest(dest));
}
pub fn render_animation_frame(&mut self) {
let _ = self.sender.send(SessionMsg::RenderAnimationFrame);
}
pub fn end_session(&mut self) {
let _ = self.sender.send(SessionMsg::Quit);
}
pub fn apply_event(&mut self, event: FrameUpdateEvent) {
match event {
FrameUpdateEvent::UpdateFloorTransform(floor) => self.floor_transform = floor,
FrameUpdateEvent::UpdateViewports(vp) => self.viewports = vp,
FrameUpdateEvent::HitTestSourceAdded(_) => (),
}
}
pub fn granted_features(&self) -> &[String] {
&self.granted_features
}
pub fn request_hit_test(&self, source: HitTestSource) {
let _ = self.sender.send(SessionMsg::RequestHitTest(source));
}
pub fn cancel_hit_test(&self, id: HitTestId) {
let _ = self.sender.send(SessionMsg::CancelHitTest(id));
}
pub fn update_frame_rate(&mut self, rate: f32, sender: Sender<f32>) {
let _ = self.sender.send(SessionMsg::UpdateFrameRate(rate, sender));
}
pub fn supported_frame_rates(&self) -> &[f32] {
&self.supported_frame_rates
}
}
#[derive(PartialEq)]
enum RenderState {
NotInRenderLoop,
InRenderLoop,
PendingQuit,
}
/// For devices that want to do their own thread management, the `SessionThread` type is exposed.
pub struct SessionThread<Device> {
receiver: Receiver<SessionMsg>,
sender: Sender<SessionMsg>,
layers: Vec<(ContextId, LayerId)>,
pending_layers: Option<Vec<(ContextId, LayerId)>>,
frame_count: u64,
frame_sender: Sender<Frame>,
running: bool,
device: Device,
id: SessionId,
render_state: RenderState,
}
impl<Device> SessionThread<Device>
where
Device: DeviceAPI,
{
pub fn new(
mut device: Device,
frame_sender: Sender<Frame>,
id: SessionId,
) -> Result<Self, Error> {
let (sender, receiver) = crate::channel().or(Err(Error::CommunicationError))?;
device.set_quitter(Quitter {
sender: sender.clone(),
});
let frame_count = 0;
let running = true;
let layers = Vec::new();
let pending_layers = None;
Ok(SessionThread {
sender,
receiver,
device,
layers,
pending_layers,
frame_count,
frame_sender,
running,
id,
render_state: RenderState::NotInRenderLoop,
})
}
pub fn new_session(&mut self) -> Session {
let floor_transform = self.device.floor_transform();
let viewports = self.device.viewports();
let sender = self.sender.clone();
let initial_inputs = self.device.initial_inputs();
let environment_blend_mode = self.device.environment_blend_mode();
let granted_features = self.device.granted_features().into();
let supported_frame_rates = self.device.supported_frame_rates();
Session {
floor_transform,
viewports,
sender,
initial_inputs,
environment_blend_mode,
granted_features,
id: self.id,
supported_frame_rates,
}
}
pub fn run(&mut self) {
loop {
if let Ok(msg) = self.receiver.recv() {
if !self.handle_msg(msg) {
self.running = false;
break;
}
} else {
break;
}
}
}
fn handle_msg(&mut self, msg: SessionMsg) -> bool {
log::debug!("processing {:?}", msg);
match msg {
SessionMsg::SetEventDest(dest) => {
self.device.set_event_dest(dest);
}
SessionMsg::RequestHitTest(source) => {
self.device.request_hit_test(source);
}
SessionMsg::CancelHitTest(id) => {
self.device.cancel_hit_test(id);
}
SessionMsg::CreateLayer(context_id, layer_init, sender) => {
let result = self.device.create_layer(context_id, layer_init);
let _ = sender.send(result);
}
SessionMsg::DestroyLayer(context_id, layer_id) => {
self.layers.retain(|&(_, other_id)| layer_id != other_id);
self.device.destroy_layer(context_id, layer_id);
}
SessionMsg::SetLayers(layers) => {
self.pending_layers = Some(layers);
}
SessionMsg::StartRenderLoop => {
if let Some(layers) = self.pending_layers.take() {
self.layers = layers;
}
let frame = match self.device.begin_animation_frame(&self.layers[..]) {
Some(frame) => frame,
None => {
warn!("Device stopped providing frames, exiting");
return false;
}
};
self.render_state = RenderState::InRenderLoop;
let _ = self.frame_sender.send(frame);
}
SessionMsg::UpdateClipPlanes(near, far) => self.device.update_clip_planes(near, far),
SessionMsg::RenderAnimationFrame => {
self.frame_count += 1;
self.device.end_animation_frame(&self.layers[..]);
if self.render_state == RenderState::PendingQuit {
self.quit();
return false;
}
if let Some(layers) = self.pending_layers.take() {
self.layers = layers;
}
#[allow(unused_mut)]
let mut frame = match self.device.begin_animation_frame(&self.layers[..]) {
Some(frame) => frame,
None => {
warn!("Device stopped providing frames, exiting");
return false;
}
};
let _ = self.frame_sender.send(frame);
}
SessionMsg::UpdateFrameRate(rate, sender) => {
let new_framerate = self.device.update_frame_rate(rate);
let _ = sender.send(new_framerate);
}
SessionMsg::Quit => {
if self.render_state == RenderState::NotInRenderLoop {
self.quit();
return false;
} else {
self.render_state = RenderState::PendingQuit;
}
}
SessionMsg::GetBoundsGeometry(sender) => {
let bounds = self.device.reference_space_bounds();
let _ = sender.send(bounds);
}
}
true
}
fn quit(&mut self) {
self.render_state = RenderState::NotInRenderLoop;
self.device.quit();
}
}
/// Devices that need to can run sessions on the main thread.
pub trait MainThreadSession: 'static {
fn run_one_frame(&mut self);
fn running(&self) -> bool;
}
impl<Device> MainThreadSession for SessionThread<Device>
where
Device: DeviceAPI,
{
fn run_one_frame(&mut self) {
let frame_count = self.frame_count;
while frame_count == self.frame_count && self.running {
if let Ok(msg) = crate::recv_timeout(&self.receiver, TIMEOUT) {
self.running = self.handle_msg(msg);
} else {
break;
}
}
}
fn running(&self) -> bool {
self.running
}
}
/// A type for building XR sessions
pub struct SessionBuilder<'a, GL> {
sessions: &'a mut Vec<Box<dyn MainThreadSession>>,
frame_sender: Sender<Frame>,
layer_grand_manager: LayerGrandManager<GL>,
id: SessionId,
}
impl<'a, GL: 'static> SessionBuilder<'a, GL> {
pub fn id(&self) -> SessionId {
self.id
}
pub(crate) fn new(
sessions: &'a mut Vec<Box<dyn MainThreadSession>>,
frame_sender: Sender<Frame>,
layer_grand_manager: LayerGrandManager<GL>,
id: SessionId,
) -> Self {
SessionBuilder {
sessions,
frame_sender,
layer_grand_manager,
id,
}
}
/// For devices which are happy to hand over thread management to webxr.
pub fn spawn<Device, Factory>(self, factory: Factory) -> Result<Session, Error>
where
Factory: 'static + FnOnce(LayerGrandManager<GL>) -> Result<Device, Error> + Send,
Device: DeviceAPI,
{
let (acks, ackr) = crate::channel().or(Err(Error::CommunicationError))?;
let frame_sender = self.frame_sender;
let layer_grand_manager = self.layer_grand_manager;
let id = self.id;
thread::spawn(move || {
match factory(layer_grand_manager)
.and_then(|device| SessionThread::new(device, frame_sender, id))
{
Ok(mut thread) => {
let session = thread.new_session();
let _ = acks.send(Ok(session));
thread.run();
}
Err(err) => {
let _ = acks.send(Err(err));
}
}
});
ackr.recv().unwrap_or(Err(Error::CommunicationError))
}
/// For devices that need to run on the main thread.
pub fn run_on_main_thread<Device, Factory>(self, factory: Factory) -> Result<Session, Error>
where
Factory: 'static + FnOnce(LayerGrandManager<GL>) -> Result<Device, Error>,
Device: DeviceAPI,
{
let device = factory(self.layer_grand_manager)?;
let frame_sender = self.frame_sender;
let mut session_thread = SessionThread::new(device, frame_sender, self.id)?;
let session = session_thread.new_session();
self.sessions.push(Box::new(session_thread));
Ok(session)
}
}

View file

@@ -0,0 +1,28 @@
use crate::InputId;
use crate::Joint;
use euclid::RigidTransform3D;
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// A stand-in type for "the space isn't statically known since
/// it comes from client side code"
pub struct ApiSpace;
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub enum BaseSpace {
Local,
Floor,
Viewer,
BoundedFloor,
TargetRay(InputId),
Grip(InputId),
Joint(InputId, Joint),
}
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct Space {
pub base: BaseSpace,
pub offset: RigidTransform3D<f32, ApiSpace, ApiSpace>,
}
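
A sketch (not part of this commit): a Space pairs a BaseSpace with a rigid offset expressed in that space, which is how a reference space with an origin offset is represented here.

use euclid::{RigidTransform3D, Rotation3D, Vector3D};
use webxr_api::{BaseSpace, Space};

// One metre in front of the viewer (WebXR treats -Z as "forward").
fn space_in_front_of_viewer() -> Space {
    Space {
        base: BaseSpace::Viewer,
        offset: RigidTransform3D::new(Rotation3D::identity(), Vector3D::new(0.0, 0.0, -1.0)),
    }
}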

View file

@@ -0,0 +1,129 @@
use crate::FrameUpdateEvent;
use crate::HitTestId;
use crate::HitTestSource;
use euclid::Transform3D;
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
pub struct ClipPlanes {
pub near: f32,
pub far: f32,
/// Was there an update that needs propagation to the client?
update: bool,
}
impl Default for ClipPlanes {
fn default() -> Self {
ClipPlanes {
near: 0.1,
far: 1000.,
update: false,
}
}
}
impl ClipPlanes {
pub fn update(&mut self, near: f32, far: f32) {
self.near = near;
self.far = far;
self.update = true;
}
/// Checks for and clears the pending update flag
pub fn recently_updated(&mut self) -> bool {
if self.update {
self.update = false;
true
} else {
false
}
}
}
#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "ipc", derive(serde::Serialize, serde::Deserialize))]
/// Holds on to hit tests
pub struct HitTestList {
tests: Vec<HitTestSource>,
uncommitted_tests: Vec<HitTestSource>,
}
impl HitTestList {
pub fn request_hit_test(&mut self, source: HitTestSource) {
self.uncommitted_tests.push(source)
}
pub fn commit_tests(&mut self) -> Vec<FrameUpdateEvent> {
let mut events = vec![];
for test in self.uncommitted_tests.drain(..) {
events.push(FrameUpdateEvent::HitTestSourceAdded(test.id));
self.tests.push(test);
}
events
}
pub fn tests(&self) -> &[HitTestSource] {
&self.tests
}
pub fn cancel_hit_test(&mut self, id: HitTestId) {
self.tests.retain(|s| s.id != id);
self.uncommitted_tests.retain(|s| s.id != id);
}
}
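// Lifecycle note (mirrors how the headless backend later in this patch uses this):
// `request_hit_test` queues a source, `commit_tests` is called at the start of the
// next animation frame and yields `HitTestSourceAdded` events for the client, and
// `tests()` is what a device iterates over when producing `HitTestResult`s.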
#[inline]
/// Construct a projection matrix given the four angles from the center for the faces of the viewing frustum
pub fn fov_to_projection_matrix<T, U>(
left: f32,
right: f32,
top: f32,
bottom: f32,
clip_planes: ClipPlanes,
) -> Transform3D<f32, T, U> {
let near = clip_planes.near;
// XXXManishearth deal with infinite planes
let left = left.tan() * near;
let right = right.tan() * near;
let top = top.tan() * near;
let bottom = bottom.tan() * near;
frustum_to_projection_matrix(left, right, top, bottom, clip_planes)
}
#[inline]
/// Construct a projection matrix given the actual extent of the viewing frustum on the near plane
pub fn frustum_to_projection_matrix<T, U>(
left: f32,
right: f32,
top: f32,
bottom: f32,
clip_planes: ClipPlanes,
) -> Transform3D<f32, T, U> {
let near = clip_planes.near;
let far = clip_planes.far;
let w = right - left;
let h = top - bottom;
let d = far - near;
// Column-major order
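// For reference, in conventional column-vector notation this is the standard
// off-center perspective matrix (n = near, f = far, w = right - left,
// h = top - bottom, d = far - near):
//   [ 2n/w   0      (r+l)/w    0      ]
//   [ 0      2n/h   (t+b)/h    0      ]
//   [ 0      0     -(f+n)/d   -2fn/d  ]
//   [ 0      0      -1         0      ]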
Transform3D::new(
2. * near / w,
0.,
0.,
0.,
0.,
2. * near / h,
0.,
0.,
(right + left) / w,
(top + bottom) / h,
-(far + near) / d,
-1.,
0.,
0.,
-2. * far * near / d,
0.,
)
}


@ -0,0 +1,170 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! This crate uses `euclid`'s typed units, and exposes different coordinate spaces.
use euclid::Rect;
use euclid::RigidTransform3D;
use euclid::Transform3D;
#[cfg(feature = "ipc")]
use serde::{Deserialize, Serialize};
use std::marker::PhantomData;
/// The coordinate space of the viewer
/// https://immersive-web.github.io/webxr/#dom-xrreferencespacetype-viewer
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Viewer {}
/// The coordinate space of the floor
/// https://immersive-web.github.io/webxr/#dom-xrreferencespacetype-local-floor
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Floor {}
/// The coordinate space of the left eye
/// https://immersive-web.github.io/webxr/#dom-xreye-left
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum LeftEye {}
/// The coordinate space of the right eye
/// https://immersive-web.github.io/webxr/#dom-xreye-right
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum RightEye {}
/// The coordinate space of the left frustum of a cubemap
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum CubeLeft {}
/// The coordinate space of the right frustum of a cubemap
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum CubeRight {}
/// The coordinate space of the top frustum of a cubemap
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum CubeTop {}
/// The coordinate space of the bottom frustum of a cubemap
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum CubeBottom {}
/// The coordinate space of the back frustum of a cubemap
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum CubeBack {}
/// Pattern-match on eyes
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct SomeEye<Eye>(u8, PhantomData<Eye>);
pub const LEFT_EYE: SomeEye<LeftEye> = SomeEye(0, PhantomData);
pub const RIGHT_EYE: SomeEye<RightEye> = SomeEye(1, PhantomData);
pub const VIEWER: SomeEye<Viewer> = SomeEye(2, PhantomData);
pub const CUBE_LEFT: SomeEye<CubeLeft> = SomeEye(3, PhantomData);
pub const CUBE_RIGHT: SomeEye<CubeRight> = SomeEye(4, PhantomData);
pub const CUBE_TOP: SomeEye<CubeTop> = SomeEye(5, PhantomData);
pub const CUBE_BOTTOM: SomeEye<CubeBottom> = SomeEye(6, PhantomData);
pub const CUBE_BACK: SomeEye<CubeBack> = SomeEye(7, PhantomData);
impl<Eye1, Eye2> PartialEq<SomeEye<Eye2>> for SomeEye<Eye1> {
fn eq(&self, rhs: &SomeEye<Eye2>) -> bool {
self.0 == rhs.0
}
}
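// Example (sketch based on code later in this patch): the cross-type `PartialEq`
// above is what lets a generic `view<Eye>(...)` method write comparisons such as
// `if eye == RIGHT_EYE { ... } else if eye == LEFT_EYE { ... }` against the typed
// constants, as the glwindow backend does when picking per-eye translations.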
/// The native 3D coordinate space of the device
/// This is not part of the WebXR specification.
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Native {}
/// The normalized device coordinate space, where the display
/// is from (-1,-1) to (1,1).
// TODO: are we OK assuming that we can use the same coordinate system for all displays?
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Display {}
/// The unnormalized device coordinate space, where the display
/// is from (0,0) to (w,h), measured in pixels.
// TODO: are we OK assuming that we can use the same coordinate system for all displays?
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Viewport {}
/// The coordinate space of an input device
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Input {}
/// The coordinate space of a secondary capture view
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Capture {}
/// For each eye, the pose of that eye and
/// its projection onto its display.
/// For stereo displays, we have a `View<LeftEye>` and a `View<RightEye>`.
/// For mono displays, we have a `View<Viewer>`.
/// https://immersive-web.github.io/webxr/#xrview
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct View<Eye> {
pub transform: RigidTransform3D<f32, Eye, Native>,
pub projection: Transform3D<f32, Eye, Display>,
}
impl<Eye> Default for View<Eye> {
fn default() -> Self {
View {
transform: RigidTransform3D::identity(),
projection: Transform3D::identity(),
}
}
}
impl<Eye> View<Eye> {
pub fn cast_unit<NewEye>(&self) -> View<NewEye> {
View {
transform: self.transform.cast_unit(),
projection: Transform3D::from_untyped(&self.projection.to_untyped()),
}
}
}
/// Whether a device is mono or stereo, and the views it supports.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub enum Views {
/// Mono view for inline VR, viewport and projection matrices are calculated by client
Inline,
Mono(View<Viewer>),
Stereo(View<LeftEye>, View<RightEye>),
StereoCapture(View<LeftEye>, View<RightEye>, View<Capture>),
Cubemap(
View<Viewer>,
View<CubeLeft>,
View<CubeRight>,
View<CubeTop>,
View<CubeBottom>,
View<CubeBack>,
),
}
/// A list of viewports per-eye in the order of fields in Views.
///
/// Not all of them need to be in active use.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "ipc", derive(Serialize, Deserialize))]
pub struct Viewports {
pub viewports: Vec<Rect<i32, Viewport>>,
}
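// For example, the glwindow backend later in this patch returns two viewports
// (left then right) in its stereo modes and six in its cubemap/spherical modes,
// matching the arity of `Views::Stereo` and `Views::Cubemap` above.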


@ -0,0 +1,49 @@
[package]
name = "webxr"
version = "0.0.1"
authors = ["The Servo Project Developers"]
edition = "2018"
homepage = "https://github.com/servo/webxr"
repository = "https://github.com/servo/webxr"
keywords = ["ar", "headset", "openxr", "vr", "webxr"]
license = "MPL-2.0"
description = '''A safe Rust API that provides a way to interact with
virtual reality and augmented reality devices and integration with OpenXR.
The API is inspired by the WebXR Device API (https://www.w3.org/TR/webxr/)
but adapted to Rust design patterns.'''
[lib]
path = "lib.rs"
[features]
default = ["x11"]
x11 = ["surfman/sm-x11"]
angle = ["surfman/sm-angle"]
glwindow = []
headless = []
ipc = ["webxr-api/ipc", "serde"]
openxr-api = ["angle", "openxr", "winapi", "wio", "surfman/sm-angle-default"]
[dependencies]
webxr-api = { path = "../shared/webxr" }
crossbeam-channel = "0.5"
euclid = "0.22"
log = "0.4.6"
openxr = { version = "0.19", optional = true }
serde = { version = "1.0", optional = true }
glow = "0.16"
raw-window-handle = "0.6"
surfman = { git = "https://github.com/servo/surfman", rev = "300789ddbda45c89e9165c31118bf1c4c07f89f6", features = [
"chains",
"sm-raw-window-handle-06",
] }
[target.'cfg(target_os = "windows")'.dependencies]
winapi = { version = "0.3", features = [
"dxgi",
"d3d11",
"winerror",
], optional = true }
wio = { version = "0.2", optional = true }


@ -0,0 +1,190 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::SurfmanGL;
use glow as gl;
use glow::Context as Gl;
use glow::HasContext;
use std::collections::HashMap;
use std::num::NonZero;
use surfman::Device as SurfmanDevice;
use webxr_api::ContextId;
use webxr_api::GLContexts;
use webxr_api::LayerId;
pub(crate) fn framebuffer(framebuffer: u32) -> Option<gl::NativeFramebuffer> {
NonZero::new(framebuffer).map(gl::NativeFramebuffer)
}
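// Note on the helper above: glow models the default framebuffer (id 0) as `None`,
// so raw ids read back from GL_DRAW/READ_FRAMEBUFFER_BINDING queries are converted
// to `Option<NativeFramebuffer>` before being passed back to `bind_framebuffer`.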
// A utility to clear a color texture and optional depth/stencil texture
pub(crate) struct GlClearer {
fbos: HashMap<
(
LayerId,
Option<gl::NativeTexture>,
Option<gl::NativeTexture>,
),
Option<gl::NativeFramebuffer>,
>,
should_reverse_winding: bool,
}
impl GlClearer {
pub(crate) fn new(should_reverse_winding: bool) -> GlClearer {
let fbos = HashMap::new();
GlClearer {
fbos,
should_reverse_winding,
}
}
fn fbo(
&mut self,
gl: &Gl,
layer_id: LayerId,
color: Option<gl::NativeTexture>,
color_target: u32,
depth_stencil: Option<gl::NativeTexture>,
) -> Option<gl::NativeFramebuffer> {
let should_reverse_winding = self.should_reverse_winding;
*self
.fbos
.entry((layer_id, color, depth_stencil))
.or_insert_with(|| {
// Save the current GL state
let mut bound_fbos = [0, 0];
unsafe {
gl.get_parameter_i32_slice(gl::DRAW_FRAMEBUFFER_BINDING, &mut bound_fbos[0..]);
gl.get_parameter_i32_slice(gl::READ_FRAMEBUFFER_BINDING, &mut bound_fbos[1..]);
// Generate and set attachments of a new FBO
let fbo = gl.create_framebuffer().ok();
gl.bind_framebuffer(gl::FRAMEBUFFER, fbo);
gl.framebuffer_texture_2d(
gl::FRAMEBUFFER,
gl::COLOR_ATTACHMENT0,
color_target,
color,
0,
);
gl.framebuffer_texture_2d(
gl::FRAMEBUFFER,
gl::DEPTH_STENCIL_ATTACHMENT,
gl::TEXTURE_2D,
depth_stencil,
0,
);
// Necessary if using an OpenXR runtime that does not support mutable FOV,
// as flipping the projection matrix necessitates reversing the winding order.
if should_reverse_winding {
gl.front_face(gl::CW);
}
// Restore the GL state
gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, framebuffer(bound_fbos[0] as _));
gl.bind_framebuffer(gl::READ_FRAMEBUFFER, framebuffer(bound_fbos[1] as _));
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
fbo
}
})
}
pub(crate) fn clear(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
context_id: ContextId,
layer_id: LayerId,
color: Option<glow::NativeTexture>,
color_target: u32,
depth_stencil: Option<glow::NativeTexture>,
) {
let gl = match contexts.bindings(device, context_id) {
None => return,
Some(gl) => gl,
};
let fbo = self.fbo(gl, layer_id, color, color_target, depth_stencil);
unsafe {
// Save the current GL state
let mut bound_fbos = [0, 0];
let mut clear_color = [0., 0., 0., 0.];
let mut clear_depth = [0.];
let mut clear_stencil = [0];
let color_mask;
let depth_mask;
let mut stencil_mask = [0];
let scissor_enabled = gl.is_enabled(gl::SCISSOR_TEST);
let rasterizer_enabled = gl.is_enabled(gl::RASTERIZER_DISCARD);
gl.get_parameter_i32_slice(gl::DRAW_FRAMEBUFFER_BINDING, &mut bound_fbos[0..]);
gl.get_parameter_i32_slice(gl::READ_FRAMEBUFFER_BINDING, &mut bound_fbos[1..]);
gl.get_parameter_f32_slice(gl::COLOR_CLEAR_VALUE, &mut clear_color[..]);
gl.get_parameter_f32_slice(gl::DEPTH_CLEAR_VALUE, &mut clear_depth[..]);
gl.get_parameter_i32_slice(gl::STENCIL_CLEAR_VALUE, &mut clear_stencil[..]);
depth_mask = gl.get_parameter_bool(gl::DEPTH_WRITEMASK);
gl.get_parameter_i32_slice(gl::STENCIL_WRITEMASK, &mut stencil_mask[..]);
color_mask = gl.get_parameter_bool_array::<4>(gl::COLOR_WRITEMASK);
// Clear it
gl.bind_framebuffer(gl::FRAMEBUFFER, fbo);
gl.clear_color(0., 0., 0., 1.);
gl.clear_depth(1.);
gl.clear_stencil(0);
gl.disable(gl::SCISSOR_TEST);
gl.disable(gl::RASTERIZER_DISCARD);
gl.depth_mask(true);
gl.stencil_mask(0xFFFFFFFF);
gl.color_mask(true, true, true, true);
gl.clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT | gl::STENCIL_BUFFER_BIT);
// Restore the GL state
gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, framebuffer(bound_fbos[0] as _));
gl.bind_framebuffer(gl::READ_FRAMEBUFFER, framebuffer(bound_fbos[1] as _));
gl.clear_color(
clear_color[0],
clear_color[1],
clear_color[2],
clear_color[3],
);
gl.color_mask(color_mask[0], color_mask[1], color_mask[2], color_mask[3]);
gl.clear_depth(clear_depth[0] as f64);
gl.clear_stencil(clear_stencil[0]);
gl.depth_mask(depth_mask);
gl.stencil_mask(stencil_mask[0] as _);
if scissor_enabled {
gl.enable(gl::SCISSOR_TEST);
}
if rasterizer_enabled {
gl.enable(gl::RASTERIZER_DISCARD);
}
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
}
}
pub(crate) fn destroy_layer(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
context_id: ContextId,
layer_id: LayerId,
) {
let gl = match contexts.bindings(device, context_id) {
None => return,
Some(gl) => gl,
};
self.fbos.retain(|&(other_id, _, _), &mut fbo| {
if layer_id != other_id {
true
} else {
if let Some(fbo) = fbo {
unsafe { gl.delete_framebuffer(fbo) };
}
false
}
})
}
}


@ -0,0 +1,877 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::gl_utils::framebuffer;
use crate::{SurfmanGL, SurfmanLayerManager};
use core::slice;
use euclid::{
Angle, Point2D, Rect, RigidTransform3D, Rotation3D, Size2D, Transform3D, UnknownUnit, Vector3D,
};
use glow::{self as gl, Context as Gl, HasContext};
use raw_window_handle::DisplayHandle;
use std::num::NonZeroU32;
use std::rc::Rc;
use surfman::chains::{PreserveBuffer, SwapChain, SwapChainAPI, SwapChains, SwapChainsAPI};
use surfman::{
Adapter, Connection, Context as SurfmanContext, ContextAttributeFlags, ContextAttributes,
Device as SurfmanDevice, GLApi, GLVersion, NativeWidget, SurfaceAccess, SurfaceType,
};
use webxr_api::util::ClipPlanes;
use webxr_api::{
ContextId, DeviceAPI, DiscoveryAPI, Display, Error, Event, EventBuffer, Floor, Frame,
InputSource, LayerGrandManager, LayerId, LayerInit, LayerManager, Native, Quitter, Sender,
Session, SessionBuilder, SessionInit, SessionMode, SomeEye, View, Viewer, ViewerPose, Viewport,
Viewports, Views, CUBE_BACK, CUBE_BOTTOM, CUBE_LEFT, CUBE_RIGHT, CUBE_TOP, LEFT_EYE, RIGHT_EYE,
VIEWER,
};
// How far off the ground are the viewer's eyes?
const HEIGHT: f32 = 1.0;
// What is half the vertical field of view?
const FOV_UP: f32 = 45.0;
// Some guesstimated numbers; hopefully it doesn't matter if these are off by a bit.
// What is the distance between the viewer's eyes?
const INTER_PUPILLARY_DISTANCE: f32 = 0.06;
// How many pixels are there per metre?
const PIXELS_PER_METRE: f32 = 6000.0;
pub trait GlWindow {
fn get_render_target(
&self,
device: &mut SurfmanDevice,
context: &mut SurfmanContext,
) -> GlWindowRenderTarget;
fn get_rotation(&self) -> Rotation3D<f32, UnknownUnit, UnknownUnit>;
fn get_translation(&self) -> Vector3D<f32, UnknownUnit>;
fn get_mode(&self) -> GlWindowMode {
GlWindowMode::Blit
}
fn display_handle(&self) -> DisplayHandle;
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum GlWindowMode {
Blit,
StereoLeftRight,
StereoRedCyan,
Cubemap,
Spherical,
}
pub enum GlWindowRenderTarget {
NativeWidget(NativeWidget),
SwapChain(SwapChain<SurfmanDevice>),
}
pub struct GlWindowDiscovery {
connection: Connection,
adapter: Adapter,
context_attributes: ContextAttributes,
window: Rc<dyn GlWindow>,
}
impl GlWindowDiscovery {
pub fn new(window: Rc<dyn GlWindow>) -> GlWindowDiscovery {
let connection = Connection::from_display_handle(window.display_handle()).unwrap();
let adapter = connection.create_adapter().unwrap();
let flags = ContextAttributeFlags::ALPHA
| ContextAttributeFlags::DEPTH
| ContextAttributeFlags::STENCIL;
let version = match connection.gl_api() {
GLApi::GLES => GLVersion { major: 3, minor: 0 },
GLApi::GL => GLVersion { major: 3, minor: 2 },
};
let context_attributes = ContextAttributes { flags, version };
GlWindowDiscovery {
connection,
adapter,
context_attributes,
window,
}
}
}
impl DiscoveryAPI<SurfmanGL> for GlWindowDiscovery {
fn request_session(
&mut self,
mode: SessionMode,
init: &SessionInit,
xr: SessionBuilder<SurfmanGL>,
) -> Result<Session, Error> {
if self.supports_session(mode) {
let granted_features = init.validate(mode, &["local-floor".into()])?;
let connection = self.connection.clone();
let adapter = self.adapter.clone();
let context_attributes = self.context_attributes.clone();
let window = self.window.clone();
xr.run_on_main_thread(move |grand_manager| {
GlWindowDevice::new(
connection,
adapter,
context_attributes,
window,
granted_features,
grand_manager,
)
})
} else {
Err(Error::NoMatchingDevice)
}
}
fn supports_session(&self, mode: SessionMode) -> bool {
mode == SessionMode::ImmersiveVR || mode == SessionMode::ImmersiveAR
}
}
pub struct GlWindowDevice {
device: SurfmanDevice,
context: SurfmanContext,
gl: Rc<Gl>,
window: Rc<dyn GlWindow>,
grand_manager: LayerGrandManager<SurfmanGL>,
layer_manager: Option<LayerManager>,
target_swap_chain: Option<SwapChain<SurfmanDevice>>,
swap_chains: SwapChains<LayerId, SurfmanDevice>,
read_fbo: Option<gl::NativeFramebuffer>,
events: EventBuffer,
clip_planes: ClipPlanes,
granted_features: Vec<String>,
shader: Option<GlWindowShader>,
}
impl DeviceAPI for GlWindowDevice {
fn floor_transform(&self) -> Option<RigidTransform3D<f32, Native, Floor>> {
let translation = Vector3D::new(0.0, HEIGHT, 0.0);
Some(RigidTransform3D::from_translation(translation))
}
fn viewports(&self) -> Viewports {
let size = self.viewport_size();
let viewports = match self.window.get_mode() {
GlWindowMode::Cubemap | GlWindowMode::Spherical => vec![
Rect::new(Point2D::new(size.width * 1, size.height * 1), size),
Rect::new(Point2D::new(size.width * 0, size.height * 1), size),
Rect::new(Point2D::new(size.width * 2, size.height * 1), size),
Rect::new(Point2D::new(size.width * 2, size.height * 0), size),
Rect::new(Point2D::new(size.width * 0, size.height * 0), size),
Rect::new(Point2D::new(size.width * 1, size.height * 0), size),
],
GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => {
vec![
Rect::new(Point2D::default(), size),
Rect::new(Point2D::new(size.width, 0), size),
]
}
};
Viewports { viewports }
}
fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result<LayerId, Error> {
self.layer_manager()?.create_layer(context_id, init)
}
fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) {
self.layer_manager()
.unwrap()
.destroy_layer(context_id, layer_id)
}
fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option<Frame> {
log::debug!("Begin animation frame for layers {:?}", layers);
let translation = Vector3D::from_untyped(self.window.get_translation());
let translation: RigidTransform3D<_, _, Native> =
RigidTransform3D::from_translation(translation);
let rotation = Rotation3D::from_untyped(&self.window.get_rotation());
let rotation = RigidTransform3D::from_rotation(rotation);
let transform = translation.then(&rotation);
let sub_images = self.layer_manager().ok()?.begin_frame(layers).ok()?;
Some(Frame {
pose: Some(ViewerPose {
transform,
views: self.views(transform),
}),
inputs: vec![],
events: vec![],
sub_images,
hit_test_results: vec![],
predicted_display_time: 0.0,
})
}
fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) {
log::debug!("End animation frame for layers {:?}", layers);
self.device.make_context_current(&self.context).unwrap();
debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR);
let _ = self.layer_manager().unwrap().end_frame(layers);
let window_size = self.window_size();
let viewport_size = self.viewport_size();
let framebuffer_object = self
.device
.context_surface_info(&self.context)
.unwrap()
.map(|info| info.framebuffer_object)
.unwrap_or(0);
unsafe {
self.gl
.bind_framebuffer(gl::FRAMEBUFFER, framebuffer(framebuffer_object));
debug_assert_eq!(
(
self.gl.get_error(),
self.gl.check_framebuffer_status(gl::FRAMEBUFFER)
),
(gl::NO_ERROR, gl::FRAMEBUFFER_COMPLETE)
);
self.gl.clear_color(0.0, 0.0, 0.0, 0.0);
self.gl.clear(gl::COLOR_BUFFER_BIT);
debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR);
}
for &(_, layer_id) in layers {
let swap_chain = match self.swap_chains.get(layer_id) {
Some(swap_chain) => swap_chain,
None => continue,
};
let surface = match swap_chain.take_surface() {
Some(surface) => surface,
None => return,
};
let texture_size = self.device.surface_info(&surface).size;
let surface_texture = self
.device
.create_surface_texture(&mut self.context, surface)
.unwrap();
let raw_texture_id = self.device.surface_texture_object(&surface_texture);
let texture_id = NonZeroU32::new(raw_texture_id).map(gl::NativeTexture);
let texture_target = self.device.surface_gl_texture_target();
log::debug!("Presenting texture {}", raw_texture_id);
if let Some(ref shader) = self.shader {
shader.draw_texture(
texture_id,
texture_target,
texture_size,
viewport_size,
window_size,
);
} else {
self.blit_texture(texture_id, texture_target, texture_size, window_size);
}
debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR);
let surface = self
.device
.destroy_surface_texture(&mut self.context, surface_texture)
.unwrap();
swap_chain.recycle_surface(surface);
}
match self.target_swap_chain.as_ref() {
Some(target_swap_chain) => {
// Rendering to a surfman swap chain
target_swap_chain
.swap_buffers(&mut self.device, &mut self.context, PreserveBuffer::No)
.unwrap();
}
None => {
// Rendering to a native widget
let mut surface = self
.device
.unbind_surface_from_context(&mut self.context)
.unwrap()
.unwrap();
self.device
.present_surface(&self.context, &mut surface)
.unwrap();
self.device
.bind_surface_to_context(&mut self.context, surface)
.unwrap();
}
}
debug_assert_eq!(unsafe { self.gl.get_error() }, gl::NO_ERROR);
}
fn initial_inputs(&self) -> Vec<InputSource> {
vec![]
}
fn set_event_dest(&mut self, dest: Sender<Event>) {
self.events.upgrade(dest)
}
fn quit(&mut self) {
self.events.callback(Event::SessionEnd);
}
fn set_quitter(&mut self, _: Quitter) {
// Glwindow currently doesn't have any way to end its own session
// XXXManishearth add something for this that listens for the window
// being closed
}
fn update_clip_planes(&mut self, near: f32, far: f32) {
self.clip_planes.update(near, far)
}
fn granted_features(&self) -> &[String] {
&self.granted_features
}
}
impl Drop for GlWindowDevice {
fn drop(&mut self) {
if let Some(read_fbo) = self.read_fbo {
unsafe {
self.gl.delete_framebuffer(read_fbo);
}
}
let _ = self.device.destroy_context(&mut self.context);
}
}
impl GlWindowDevice {
fn new(
connection: Connection,
adapter: Adapter,
context_attributes: ContextAttributes,
window: Rc<dyn GlWindow>,
granted_features: Vec<String>,
grand_manager: LayerGrandManager<SurfmanGL>,
) -> Result<GlWindowDevice, Error> {
let mut device = connection.create_device(&adapter).unwrap();
let context_descriptor = device
.create_context_descriptor(&context_attributes)
.unwrap();
let mut context = device.create_context(&context_descriptor, None).unwrap();
device.make_context_current(&context).unwrap();
let gl = Rc::new(unsafe {
match device.gl_api() {
GLApi::GL => Gl::from_loader_function(|symbol_name| {
device.get_proc_address(&context, symbol_name)
}),
GLApi::GLES => Gl::from_loader_function(|symbol_name| {
device.get_proc_address(&context, symbol_name)
}),
}
});
let target_swap_chain = match window.get_render_target(&mut device, &mut context) {
GlWindowRenderTarget::NativeWidget(native_widget) => {
let surface_type = SurfaceType::Widget { native_widget };
let surface = device
.create_surface(&context, SurfaceAccess::GPUOnly, surface_type)
.unwrap();
device
.bind_surface_to_context(&mut context, surface)
.unwrap();
None
}
GlWindowRenderTarget::SwapChain(target_swap_chain) => {
debug_assert!(target_swap_chain.is_attached());
Some(target_swap_chain)
}
};
let read_fbo = unsafe { gl.create_framebuffer().ok() };
unsafe {
let framebuffer_object = device
.context_surface_info(&context)
.unwrap()
.map(|info| info.framebuffer_object)
.unwrap_or(0);
gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer(framebuffer_object));
debug_assert_eq!(
(gl.get_error(), gl.check_framebuffer_status(gl::FRAMEBUFFER)),
(gl::NO_ERROR, gl::FRAMEBUFFER_COMPLETE)
);
gl.enable(gl::BLEND);
gl.blend_func_separate(
gl::SRC_ALPHA,
gl::ONE_MINUS_SRC_ALPHA,
gl::ONE,
gl::ONE_MINUS_SRC_ALPHA,
);
}
let swap_chains = SwapChains::new();
let layer_manager = None;
let shader = GlWindowShader::new(gl.clone(), window.get_mode());
debug_assert_eq!(unsafe { gl.get_error() }, gl::NO_ERROR);
Ok(GlWindowDevice {
gl,
window,
device,
context,
read_fbo,
swap_chains,
target_swap_chain,
grand_manager,
layer_manager,
events: Default::default(),
clip_planes: Default::default(),
granted_features,
shader,
})
}
fn blit_texture(
&self,
texture_id: Option<gl::NativeTexture>,
texture_target: u32,
texture_size: Size2D<i32, UnknownUnit>,
window_size: Size2D<i32, Viewport>,
) {
unsafe {
self.gl
.bind_framebuffer(gl::READ_FRAMEBUFFER, self.read_fbo);
self.gl.framebuffer_texture_2d(
gl::READ_FRAMEBUFFER,
gl::COLOR_ATTACHMENT0,
texture_target,
texture_id,
0,
);
self.gl.blit_framebuffer(
0,
0,
texture_size.width,
texture_size.height,
0,
0,
window_size.width,
window_size.height,
gl::COLOR_BUFFER_BIT,
gl::NEAREST,
);
}
}
fn layer_manager(&mut self) -> Result<&mut LayerManager, Error> {
if let Some(ref mut manager) = self.layer_manager {
return Ok(manager);
}
let swap_chains = self.swap_chains.clone();
let viewports = self.viewports();
let layer_manager = self.grand_manager.create_layer_manager(move |_, _| {
Ok(SurfmanLayerManager::new(viewports, swap_chains))
})?;
self.layer_manager = Some(layer_manager);
Ok(self.layer_manager.as_mut().unwrap())
}
fn window_size(&self) -> Size2D<i32, Viewport> {
let window_size = self
.device
.context_surface_info(&self.context)
.unwrap()
.unwrap()
.size
.to_i32();
Size2D::from_untyped(window_size)
}
fn viewport_size(&self) -> Size2D<i32, Viewport> {
let window_size = self.window_size();
match self.window.get_mode() {
GlWindowMode::StereoRedCyan => {
// This device has a slightly odd characteristic, which is that anaglyphic stereo
// renders both eyes to the same surface. If we want the two eyes to be parallel,
// and to agree at distance infinity, this means getting the XR content to render some
// wasted pixels, which are stripped off when we render to the target surface.
// (The wasted pixels are on the right of the left eye and vice versa.)
let wasted_pixels = (INTER_PUPILLARY_DISTANCE / PIXELS_PER_METRE) as i32;
Size2D::new(window_size.width + wasted_pixels, window_size.height)
}
GlWindowMode::Cubemap => {
// Cubemap viewports should be square
let size = 1.max(window_size.width / 3).max(window_size.height / 2);
Size2D::new(size, size)
}
GlWindowMode::Spherical => {
// Cubemap viewports should be square
let size = 1.max(window_size.width / 2).max(window_size.height);
Size2D::new(size, size)
}
GlWindowMode::StereoLeftRight | GlWindowMode::Blit => {
Size2D::new(window_size.width / 2, window_size.height)
}
}
}
fn views(&self, viewer: RigidTransform3D<f32, Viewer, Native>) -> Views {
match self.window.get_mode() {
GlWindowMode::Cubemap | GlWindowMode::Spherical => Views::Cubemap(
self.view(viewer, VIEWER),
self.view(viewer, CUBE_LEFT),
self.view(viewer, CUBE_RIGHT),
self.view(viewer, CUBE_TOP),
self.view(viewer, CUBE_BOTTOM),
self.view(viewer, CUBE_BACK),
),
GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => {
Views::Stereo(self.view(viewer, LEFT_EYE), self.view(viewer, RIGHT_EYE))
}
}
}
fn view<Eye>(
&self,
viewer: RigidTransform3D<f32, Viewer, Native>,
eye: SomeEye<Eye>,
) -> View<Eye> {
let projection = self.perspective();
let translation = if eye == RIGHT_EYE {
Vector3D::new(-INTER_PUPILLARY_DISTANCE / 2.0, 0.0, 0.0)
} else if eye == LEFT_EYE {
Vector3D::new(INTER_PUPILLARY_DISTANCE / 2.0, 0.0, 0.0)
} else {
Vector3D::zero()
};
let rotation = if eye == CUBE_TOP {
Rotation3D::euler(
Angle::degrees(270.0),
Angle::degrees(0.0),
Angle::degrees(90.0),
)
} else if eye == CUBE_BOTTOM {
Rotation3D::euler(
Angle::degrees(90.0),
Angle::degrees(0.0),
Angle::degrees(90.0),
)
} else if eye == CUBE_LEFT {
Rotation3D::around_y(Angle::degrees(-90.0))
} else if eye == CUBE_RIGHT {
Rotation3D::around_y(Angle::degrees(90.0))
} else if eye == CUBE_BACK {
Rotation3D::euler(
Angle::degrees(180.0),
Angle::degrees(0.0),
Angle::degrees(90.0),
)
} else {
Rotation3D::identity()
};
let transform: RigidTransform3D<f32, Viewer, Eye> =
RigidTransform3D::new(rotation, translation);
View {
transform: transform.inverse().then(&viewer),
projection,
}
}
fn perspective<Eye>(&self) -> Transform3D<f32, Eye, Display> {
let near = self.clip_planes.near;
let far = self.clip_planes.far;
// https://github.com/toji/gl-matrix/blob/bd3307196563fbb331b40fc6ebecbbfcc2a4722c/src/mat4.js#L1271
let fov_up = match self.window.get_mode() {
GlWindowMode::Spherical | GlWindowMode::Cubemap => Angle::degrees(45.0),
GlWindowMode::Blit | GlWindowMode::StereoLeftRight | GlWindowMode::StereoRedCyan => {
Angle::degrees(FOV_UP)
}
};
let f = 1.0 / fov_up.radians.tan();
let nf = 1.0 / (near - far);
let viewport_size = self.viewport_size();
let aspect = viewport_size.width as f32 / viewport_size.height as f32;
// Dear rustfmt, This is a 4x4 matrix, please leave it alone. Best, ajeffrey.
{
#[rustfmt::skip]
// Sigh, row-major vs column-major
return Transform3D::new(
f / aspect, 0.0, 0.0, 0.0,
0.0, f, 0.0, 0.0,
0.0, 0.0, (far + near) * nf, -1.0,
0.0, 0.0, 2.0 * far * near * nf, 0.0,
);
}
}
}
struct GlWindowShader {
gl: Rc<Gl>,
buffer: Option<gl::NativeBuffer>,
vao: Option<gl::NativeVertexArray>,
program: gl::NativeProgram,
mode: GlWindowMode,
}
const VERTEX_ATTRIBUTE: u32 = 0;
const VERTICES: &[[f32; 2]; 4] = &[[-1.0, -1.0], [-1.0, 1.0], [1.0, -1.0], [1.0, 1.0]];
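// The four vertices above form a full-screen quad in clip space, drawn below as a
// triangle strip; each vertex shader maps those clip-space coordinates into texture
// coordinates in its own way.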
const PASSTHROUGH_VERTEX_SHADER: &str = "
#version 330 core
layout(location=0) in vec2 coord;
out vec2 vTexCoord;
void main(void) {
gl_Position = vec4(coord, 0.0, 1.0);
vTexCoord = coord * 0.5 + 0.5;
}
";
const PASSTHROUGH_FRAGMENT_SHADER: &str = "
#version 330 core
layout(location=0) out vec4 color;
uniform sampler2D image;
in vec2 vTexCoord;
void main() {
color = texture(image, vTexCoord);
}
";
const ANAGLYPH_VERTEX_SHADER: &str = "
#version 330 core
layout(location=0) in vec2 coord;
uniform float wasted; // What fraction of the image is wasted?
out vec2 left_coord;
out vec2 right_coord;
void main(void) {
gl_Position = vec4(coord, 0.0, 1.0);
vec2 coordn = coord * 0.5 + 0.5;
left_coord = vec2(mix(wasted/2, 0.5, coordn.x), coordn.y);
right_coord = vec2(mix(0.5, 1-wasted/2, coordn.x), coordn.y);
}
";
const ANAGLYPH_RED_CYAN_FRAGMENT_SHADER: &str = "
#version 330 core
layout(location=0) out vec4 color;
uniform sampler2D image;
in vec2 left_coord;
in vec2 right_coord;
void main() {
vec4 left_color = texture(image, left_coord);
vec4 right_color = texture(image, right_coord);
float red = left_color.x;
float green = right_color.y;
float blue = right_color.z;
color = vec4(red, green, blue, 1.0);
}
";
const SPHERICAL_VERTEX_SHADER: &str = "
#version 330 core
layout(location=0) in vec2 coord;
out vec2 lon_lat;
const float PI = 3.141592654;
void main(void) {
lon_lat = coord * vec2(PI, 0.5*PI);
gl_Position = vec4(coord, 0.0, 1.0);
}
";
const SPHERICAL_FRAGMENT_SHADER: &str = "
#version 330 core
layout(location=0) out vec4 color;
uniform sampler2D image;
in vec2 lon_lat;
void main() {
vec3 direction = vec3(
sin(lon_lat.x)*cos(lon_lat.y),
sin(lon_lat.y),
cos(lon_lat.x)*cos(lon_lat.y)
);
vec2 vTexCoord;
if ((direction.y > abs(direction.x)) && (direction.y > abs(direction.z))) {
// Looking up
vTexCoord.x = direction.z / (direction.y*6.0) + 5.0/6.0;
vTexCoord.y = direction.x / (direction.y*4.0) + 1.0/4.0;
} else if ((direction.y < -abs(direction.x)) && (direction.y < -abs(direction.z))) {
// Looking down
vTexCoord.x = direction.z / (direction.y*6.0) + 1.0/6.0;
vTexCoord.y = -direction.x / (direction.y*4.0) + 1.0/4.0;
} else if (direction.z < -abs(direction.x)) {
// Looking back
vTexCoord.x = -direction.y / (direction.z*6.0) + 3.0/6.0;
vTexCoord.y = -direction.x / (direction.z*4.0) + 1.0/4.0;
} else if (direction.x < -abs(direction.z)) {
// Looking left
vTexCoord.x = -direction.z / (direction.x*6.0) + 1.0/6.0;
vTexCoord.y = -direction.y / (direction.x*4.0) + 3.0/4.0;
} else if (direction.x > abs(direction.z)) {
// Looking right
vTexCoord.x = -direction.z / (direction.x*6.0) + 5.0/6.0;
vTexCoord.y = direction.y / (direction.x*4.0) + 3.0/4.0;
} else {
// Looking ahead
vTexCoord.x = direction.x / (direction.z*6.0) + 3.0/6.0;
vTexCoord.y = direction.y / (direction.z*4.0) + 3.0/4.0;
}
color = texture(image, vTexCoord);
}
";
impl GlWindowShader {
fn new(gl: Rc<Gl>, mode: GlWindowMode) -> Option<GlWindowShader> {
// The shader source
let (vertex_source, fragment_source) = match mode {
GlWindowMode::Blit => {
return None;
}
GlWindowMode::StereoLeftRight | GlWindowMode::Cubemap => {
(PASSTHROUGH_VERTEX_SHADER, PASSTHROUGH_FRAGMENT_SHADER)
}
GlWindowMode::StereoRedCyan => {
(ANAGLYPH_VERTEX_SHADER, ANAGLYPH_RED_CYAN_FRAGMENT_SHADER)
}
GlWindowMode::Spherical => (SPHERICAL_VERTEX_SHADER, SPHERICAL_FRAGMENT_SHADER),
};
// TODO: work out why shaders don't work on macos
if cfg!(target_os = "macos") {
log::warn!("XR shaders may not render on MacOS.");
}
unsafe {
// The four corners of the window in a VAO, set to attribute 0
let buffer = gl.create_buffer().ok();
let vao = gl.create_vertex_array().ok();
gl.bind_buffer(gl::ARRAY_BUFFER, buffer);
let data =
slice::from_raw_parts(VERTICES as *const _ as _, std::mem::size_of_val(VERTICES));
gl.buffer_data_u8_slice(gl::ARRAY_BUFFER, data, gl::STATIC_DRAW);
gl.bind_vertex_array(vao);
gl.vertex_attrib_pointer_f32(
VERTEX_ATTRIBUTE,
VERTICES[0].len() as i32,
gl::FLOAT,
false,
0,
0,
);
gl.enable_vertex_attrib_array(VERTEX_ATTRIBUTE);
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
// The shader program
let program = gl.create_program().unwrap();
let vertex_shader = gl.create_shader(gl::VERTEX_SHADER).unwrap();
let fragment_shader = gl.create_shader(gl::FRAGMENT_SHADER).unwrap();
gl.shader_source(vertex_shader, vertex_source);
gl.compile_shader(vertex_shader);
gl.attach_shader(program, vertex_shader);
gl.shader_source(fragment_shader, fragment_source);
gl.compile_shader(fragment_shader);
gl.attach_shader(program, fragment_shader);
gl.link_program(program);
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
// Check for errors
// TODO: something other than panic?
let status = gl.get_shader_compile_status(vertex_shader);
assert!(
status,
"Failed to compile vertex shader: {}",
gl.get_shader_info_log(vertex_shader)
);
let status = gl.get_shader_compile_status(fragment_shader);
assert!(
status,
"Failed to compile fragment shader: {}",
gl.get_shader_info_log(fragment_shader)
);
let status = gl.get_program_link_status(program);
assert!(
status,
"Failed to link: {}",
gl.get_program_info_log(program)
);
// Clean up
gl.delete_shader(vertex_shader);
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
gl.delete_shader(fragment_shader);
debug_assert_eq!(gl.get_error(), gl::NO_ERROR);
// And we're done
Some(GlWindowShader {
gl,
buffer,
vao,
program,
mode,
})
}
}
fn draw_texture(
&self,
texture_id: Option<gl::NativeTexture>,
texture_target: u32,
texture_size: Size2D<i32, UnknownUnit>,
viewport_size: Size2D<i32, Viewport>,
window_size: Size2D<i32, Viewport>,
) {
unsafe {
self.gl.use_program(Some(self.program));
self.gl.enable_vertex_attrib_array(VERTEX_ATTRIBUTE);
self.gl.vertex_attrib_pointer_f32(
VERTEX_ATTRIBUTE,
VERTICES[0].len() as i32,
gl::FLOAT,
false,
0,
0,
);
debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR);
self.gl.active_texture(gl::TEXTURE0);
self.gl.bind_texture(texture_target, texture_id);
match self.mode {
GlWindowMode::StereoRedCyan => {
let wasted = 1.0
- (texture_size.width as f32 / viewport_size.width as f32)
.max(0.0)
.min(1.0);
let wasted_location = self.gl.get_uniform_location(self.program, "wasted");
self.gl.uniform_1_f32(wasted_location.as_ref(), wasted);
}
GlWindowMode::Blit
| GlWindowMode::Cubemap
| GlWindowMode::Spherical
| GlWindowMode::StereoLeftRight => {}
}
self.gl
.viewport(0, 0, window_size.width, window_size.height);
self.gl
.draw_arrays(gl::TRIANGLE_STRIP, 0, VERTICES.len() as i32);
self.gl.disable_vertex_attrib_array(VERTEX_ATTRIBUTE);
debug_assert_eq!(self.gl.get_error(), gl::NO_ERROR);
}
}
}
impl Drop for GlWindowShader {
fn drop(&mut self) {
unsafe {
if let Some(buffer) = self.buffer {
self.gl.delete_buffer(buffer);
}
if let Some(vao) = self.vao {
self.gl.delete_vertex_array(vao);
}
self.gl.delete_program(self.program);
}
}
}


@ -0,0 +1,564 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::SurfmanGL;
use crate::SurfmanLayerManager;
use euclid::{Point2D, RigidTransform3D};
use std::sync::{Arc, Mutex};
use std::thread;
use surfman::chains::SwapChains;
use webxr_api::util::{self, ClipPlanes, HitTestList};
use webxr_api::{
ApiSpace, BaseSpace, ContextId, DeviceAPI, DiscoveryAPI, Error, Event, EventBuffer, Floor,
Frame, FrameUpdateEvent, HitTestId, HitTestResult, HitTestSource, Input, InputFrame, InputId,
InputSource, LayerGrandManager, LayerId, LayerInit, LayerManager, MockButton, MockDeviceInit,
MockDeviceMsg, MockDiscoveryAPI, MockInputMsg, MockViewInit, MockViewsInit, MockWorld, Native,
Quitter, Ray, Receiver, SelectEvent, SelectKind, Sender, Session, SessionBuilder, SessionInit,
SessionMode, Space, SubImages, View, Viewer, ViewerPose, Viewports, Views,
};
pub struct HeadlessMockDiscovery {}
struct HeadlessDiscovery {
data: Arc<Mutex<HeadlessDeviceData>>,
supports_vr: bool,
supports_inline: bool,
supports_ar: bool,
}
struct InputInfo {
source: InputSource,
active: bool,
pointer: Option<RigidTransform3D<f32, Input, Native>>,
grip: Option<RigidTransform3D<f32, Input, Native>>,
clicking: bool,
buttons: Vec<MockButton>,
}
struct HeadlessDevice {
data: Arc<Mutex<HeadlessDeviceData>>,
id: u32,
hit_tests: HitTestList,
granted_features: Vec<String>,
grand_manager: LayerGrandManager<SurfmanGL>,
layer_manager: Option<LayerManager>,
}
struct PerSessionData {
id: u32,
mode: SessionMode,
clip_planes: ClipPlanes,
quitter: Option<Quitter>,
events: EventBuffer,
needs_vp_update: bool,
}
struct HeadlessDeviceData {
floor_transform: Option<RigidTransform3D<f32, Native, Floor>>,
viewer_origin: Option<RigidTransform3D<f32, Viewer, Native>>,
supported_features: Vec<String>,
views: MockViewsInit,
needs_floor_update: bool,
inputs: Vec<InputInfo>,
sessions: Vec<PerSessionData>,
disconnected: bool,
world: Option<MockWorld>,
next_id: u32,
bounds_geometry: Vec<Point2D<f32, Floor>>,
}
impl MockDiscoveryAPI<SurfmanGL> for HeadlessMockDiscovery {
fn simulate_device_connection(
&mut self,
init: MockDeviceInit,
receiver: Receiver<MockDeviceMsg>,
) -> Result<Box<dyn DiscoveryAPI<SurfmanGL>>, Error> {
let viewer_origin = init.viewer_origin.clone();
let floor_transform = init.floor_origin.map(|f| f.inverse());
let views = init.views.clone();
let data = HeadlessDeviceData {
floor_transform,
viewer_origin,
supported_features: init.supported_features,
views,
needs_floor_update: false,
inputs: vec![],
sessions: vec![],
disconnected: false,
world: init.world,
next_id: 0,
bounds_geometry: vec![],
};
let data = Arc::new(Mutex::new(data));
let data_ = data.clone();
thread::spawn(move || {
run_loop(receiver, data_);
});
Ok(Box::new(HeadlessDiscovery {
data,
supports_vr: init.supports_vr,
supports_inline: init.supports_inline,
supports_ar: init.supports_ar,
}))
}
}
fn run_loop(receiver: Receiver<MockDeviceMsg>, data: Arc<Mutex<HeadlessDeviceData>>) {
while let Ok(msg) = receiver.recv() {
if !data.lock().expect("Mutex poisoned").handle_msg(msg) {
break;
}
}
}
impl DiscoveryAPI<SurfmanGL> for HeadlessDiscovery {
fn request_session(
&mut self,
mode: SessionMode,
init: &SessionInit,
xr: SessionBuilder<SurfmanGL>,
) -> Result<Session, Error> {
if !self.supports_session(mode) {
return Err(Error::NoMatchingDevice);
}
let data = self.data.clone();
let mut d = data.lock().unwrap();
let id = d.next_id;
d.next_id += 1;
let per_session = PerSessionData {
id,
mode,
clip_planes: Default::default(),
quitter: Default::default(),
events: Default::default(),
needs_vp_update: false,
};
d.sessions.push(per_session);
let granted_features = init.validate(mode, &d.supported_features)?;
let layer_manager = None;
drop(d);
xr.spawn(move |grand_manager| {
Ok(HeadlessDevice {
data,
id,
granted_features,
hit_tests: HitTestList::default(),
grand_manager,
layer_manager,
})
})
}
fn supports_session(&self, mode: SessionMode) -> bool {
if self.data.lock().unwrap().disconnected {
return false;
}
match mode {
SessionMode::Inline => self.supports_inline,
SessionMode::ImmersiveVR => self.supports_vr,
SessionMode::ImmersiveAR => self.supports_ar,
}
}
}
fn view<Eye>(
init: MockViewInit<Eye>,
viewer: RigidTransform3D<f32, Viewer, Native>,
clip_planes: ClipPlanes,
) -> View<Eye> {
let projection = if let Some((l, r, t, b)) = init.fov {
util::fov_to_projection_matrix(l, r, t, b, clip_planes)
} else {
init.projection
};
View {
transform: init.transform.inverse().then(&viewer),
projection,
}
}
impl HeadlessDevice {
fn with_per_session<R>(&self, f: impl FnOnce(&mut PerSessionData) -> R) -> R {
f(self
.data
.lock()
.unwrap()
.sessions
.iter_mut()
.find(|s| s.id == self.id)
.unwrap())
}
fn layer_manager(&mut self) -> Result<&mut LayerManager, Error> {
if let Some(ref mut manager) = self.layer_manager {
return Ok(manager);
}
let swap_chains = SwapChains::new();
let viewports = self.viewports();
let layer_manager = self.grand_manager.create_layer_manager(move |_, _| {
Ok(SurfmanLayerManager::new(viewports, swap_chains))
})?;
self.layer_manager = Some(layer_manager);
Ok(self.layer_manager.as_mut().unwrap())
}
}
impl DeviceAPI for HeadlessDevice {
fn floor_transform(&self) -> Option<RigidTransform3D<f32, Native, Floor>> {
self.data.lock().unwrap().floor_transform.clone()
}
fn viewports(&self) -> Viewports {
let d = self.data.lock().unwrap();
let per_session = d.sessions.iter().find(|s| s.id == self.id).unwrap();
d.viewports(per_session.mode)
}
fn create_layer(&mut self, context_id: ContextId, init: LayerInit) -> Result<LayerId, Error> {
self.layer_manager()?.create_layer(context_id, init)
}
fn destroy_layer(&mut self, context_id: ContextId, layer_id: LayerId) {
self.layer_manager()
.unwrap()
.destroy_layer(context_id, layer_id)
}
fn begin_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) -> Option<Frame> {
let sub_images = self.layer_manager().ok()?.begin_frame(layers).ok()?;
let mut data = self.data.lock().unwrap();
let mut frame = data.get_frame(
data.sessions.iter().find(|s| s.id == self.id).unwrap(),
sub_images,
);
let per_session = data.sessions.iter_mut().find(|s| s.id == self.id).unwrap();
if per_session.needs_vp_update {
per_session.needs_vp_update = false;
let mode = per_session.mode;
let vp = data.viewports(mode);
frame.events.push(FrameUpdateEvent::UpdateViewports(vp));
}
let events = self.hit_tests.commit_tests();
frame.events.extend(events);
if let Some(ref world) = data.world {
for source in self.hit_tests.tests() {
let ray = data.native_ray(source.ray, source.space);
let ray = if let Some(ray) = ray { ray } else { break };
let hits = world
.regions
.iter()
.filter(|region| source.types.is_type(region.ty))
.flat_map(|region| &region.faces)
.filter_map(|triangle| triangle.intersect(ray))
.map(|space| HitTestResult {
space,
id: source.id,
});
frame.hit_test_results.extend(hits);
}
}
if data.needs_floor_update {
frame.events.push(FrameUpdateEvent::UpdateFloorTransform(
data.floor_transform.clone(),
));
data.needs_floor_update = false;
}
Some(frame)
}
fn end_animation_frame(&mut self, layers: &[(ContextId, LayerId)]) {
let _ = self.layer_manager().unwrap().end_frame(layers);
thread::sleep(std::time::Duration::from_millis(20));
}
fn initial_inputs(&self) -> Vec<InputSource> {
vec![]
}
fn set_event_dest(&mut self, dest: Sender<Event>) {
self.with_per_session(|s| s.events.upgrade(dest))
}
fn quit(&mut self) {
self.with_per_session(|s| s.events.callback(Event::SessionEnd))
}
fn set_quitter(&mut self, quitter: Quitter) {
self.with_per_session(|s| s.quitter = Some(quitter))
}
fn update_clip_planes(&mut self, near: f32, far: f32) {
self.with_per_session(|s| s.clip_planes.update(near, far));
}
fn granted_features(&self) -> &[String] {
&self.granted_features
}
fn request_hit_test(&mut self, source: HitTestSource) {
self.hit_tests.request_hit_test(source)
}
fn cancel_hit_test(&mut self, id: HitTestId) {
self.hit_tests.cancel_hit_test(id)
}
fn reference_space_bounds(&self) -> Option<Vec<Point2D<f32, Floor>>> {
let bounds = self.data.lock().unwrap().bounds_geometry.clone();
Some(bounds)
}
}
impl HeadlessMockDiscovery {
pub fn new() -> HeadlessMockDiscovery {
HeadlessMockDiscovery {}
}
}
macro_rules! with_all_sessions {
($self:ident, |$s:ident| $e:expr) => {
for $s in &mut $self.sessions {
$e;
}
};
}
impl HeadlessDeviceData {
fn get_frame(&self, s: &PerSessionData, sub_images: Vec<SubImages>) -> Frame {
let views = self.views.clone();
let pose = self.viewer_origin.map(|transform| {
let views = if s.mode == SessionMode::Inline {
Views::Inline
} else {
match views {
MockViewsInit::Mono(one) => Views::Mono(view(one, transform, s.clip_planes)),
MockViewsInit::Stereo(one, two) => Views::Stereo(
view(one, transform, s.clip_planes),
view(two, transform, s.clip_planes),
),
}
};
ViewerPose { transform, views }
});
let inputs = self
.inputs
.iter()
.filter(|i| i.active)
.map(|i| InputFrame {
id: i.source.id,
target_ray_origin: i.pointer,
grip_origin: i.grip,
pressed: false,
squeezed: false,
hand: None,
button_values: vec![],
axis_values: vec![],
input_changed: false,
})
.collect();
Frame {
pose,
inputs,
events: vec![],
sub_images,
hit_test_results: vec![],
predicted_display_time: 0.0,
}
}
fn viewports(&self, mode: SessionMode) -> Viewports {
let vec = if mode == SessionMode::Inline {
vec![]
} else {
match &self.views {
MockViewsInit::Mono(one) => vec![one.viewport],
MockViewsInit::Stereo(one, two) => vec![one.viewport, two.viewport],
}
};
Viewports { viewports: vec }
}
fn trigger_select(&mut self, id: InputId, kind: SelectKind, event: SelectEvent) {
for i in 0..self.sessions.len() {
let frame = self.get_frame(&self.sessions[i], Vec::new());
self.sessions[i]
.events
.callback(Event::Select(id, kind, event, frame));
}
}
fn handle_msg(&mut self, msg: MockDeviceMsg) -> bool {
match msg {
MockDeviceMsg::SetWorld(w) => self.world = Some(w),
MockDeviceMsg::ClearWorld => self.world = None,
MockDeviceMsg::SetViewerOrigin(viewer_origin) => {
self.viewer_origin = viewer_origin;
}
MockDeviceMsg::SetFloorOrigin(floor_origin) => {
self.floor_transform = floor_origin.map(|f| f.inverse());
self.needs_floor_update = true;
}
MockDeviceMsg::SetViews(views) => {
self.views = views;
with_all_sessions!(self, |s| {
s.needs_vp_update = true;
})
}
MockDeviceMsg::VisibilityChange(v) => {
with_all_sessions!(self, |s| s.events.callback(Event::VisibilityChange(v)))
}
MockDeviceMsg::AddInputSource(init) => {
self.inputs.push(InputInfo {
source: init.source.clone(),
pointer: init.pointer_origin,
grip: init.grip_origin,
active: true,
clicking: false,
buttons: init.supported_buttons,
});
with_all_sessions!(self, |s| s
.events
.callback(Event::AddInput(init.source.clone())))
}
MockDeviceMsg::MessageInputSource(id, msg) => {
if let Some(ref mut input) = self.inputs.iter_mut().find(|i| i.source.id == id) {
match msg {
MockInputMsg::SetHandedness(h) => {
input.source.handedness = h;
with_all_sessions!(self, |s| {
s.events
.callback(Event::UpdateInput(id, input.source.clone()))
});
}
MockInputMsg::SetProfiles(p) => {
input.source.profiles = p;
with_all_sessions!(self, |s| {
s.events
.callback(Event::UpdateInput(id, input.source.clone()))
});
}
MockInputMsg::SetTargetRayMode(t) => {
input.source.target_ray_mode = t;
with_all_sessions!(self, |s| {
s.events
.callback(Event::UpdateInput(id, input.source.clone()))
});
}
MockInputMsg::SetPointerOrigin(p) => input.pointer = p,
MockInputMsg::SetGripOrigin(p) => input.grip = p,
MockInputMsg::TriggerSelect(kind, event) => {
if !input.active {
return true;
}
let clicking = input.clicking;
input.clicking = event == SelectEvent::Start;
match event {
SelectEvent::Start => {
self.trigger_select(id, kind, event);
}
SelectEvent::End => {
if clicking {
self.trigger_select(id, kind, SelectEvent::Select);
} else {
self.trigger_select(id, kind, SelectEvent::End);
}
}
SelectEvent::Select => {
self.trigger_select(id, kind, SelectEvent::Start);
self.trigger_select(id, kind, SelectEvent::Select);
}
}
}
MockInputMsg::Disconnect => {
if input.active {
with_all_sessions!(self, |s| s
.events
.callback(Event::RemoveInput(input.source.id)));
input.active = false;
input.clicking = false;
}
}
MockInputMsg::Reconnect => {
if !input.active {
with_all_sessions!(self, |s| s
.events
.callback(Event::AddInput(input.source.clone())));
input.active = true;
}
}
MockInputMsg::SetSupportedButtons(buttons) => {
input.buttons = buttons;
with_all_sessions!(self, |s| s.events.callback(Event::UpdateInput(
input.source.id,
input.source.clone()
)));
}
MockInputMsg::UpdateButtonState(state) => {
if let Some(button) = input
.buttons
.iter_mut()
.find(|b| b.button_type == state.button_type)
{
*button = state;
}
}
}
}
}
MockDeviceMsg::Disconnect(s) => {
self.disconnected = true;
with_all_sessions!(self, |s| s.quitter.as_ref().map(|q| q.quit()));
// notify the client that we're done disconnecting
let _ = s.send(());
return false;
}
MockDeviceMsg::SetBoundsGeometry(g) => {
self.bounds_geometry = g;
}
MockDeviceMsg::SimulateResetPose => {
with_all_sessions!(self, |s| s.events.callback(Event::ReferenceSpaceChanged(
BaseSpace::Local,
RigidTransform3D::identity()
)));
}
}
true
}
fn native_ray(&self, ray: Ray<ApiSpace>, space: Space) -> Option<Ray<Native>> {
let origin: RigidTransform3D<f32, ApiSpace, Native> = match space.base {
BaseSpace::Local => RigidTransform3D::identity(),
BaseSpace::Floor => self.floor_transform?.inverse().cast_unit(),
BaseSpace::Viewer => self.viewer_origin?.cast_unit(),
BaseSpace::BoundedFloor => self.floor_transform?.inverse().cast_unit(),
BaseSpace::TargetRay(id) => self
.inputs
.iter()
.find(|i| i.source.id == id)?
.pointer?
.cast_unit(),
BaseSpace::Grip(id) => self
.inputs
.iter()
.find(|i| i.source.id == id)?
.grip?
.cast_unit(),
BaseSpace::Joint(..) => panic!("Cannot request mocking backend with hands"),
};
let space_origin = space.offset.then(&origin);
let origin_rigid: RigidTransform3D<f32, ApiSpace, ApiSpace> = ray.origin.into();
Some(Ray {
origin: origin_rigid.then(&space_origin).translation,
direction: space_origin.rotation.transform_vector3d(ray.direction),
})
}
}

components/webxr/lib.rs

@ -0,0 +1,22 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! This crate defines the Rust implementation of WebXR for various devices.
#[cfg(feature = "glwindow")]
pub mod glwindow;
#[cfg(feature = "headless")]
pub mod headless;
#[cfg(feature = "openxr-api")]
pub mod openxr;
pub mod surfman_layer_manager;
pub use surfman_layer_manager::SurfmanGL;
pub use surfman_layer_manager::SurfmanLayerManager;
pub type MainThreadRegistry = webxr_api::MainThreadRegistry<surfman_layer_manager::SurfmanGL>;
pub type Discovery = Box<dyn webxr_api::DiscoveryAPI<SurfmanGL>>;
pub(crate) mod gl_utils;


@ -0,0 +1,25 @@
use euclid::{Size2D, UnknownUnit};
use openxr::{ExtensionSet, FrameStream, FrameWaiter, Graphics, Instance, Session, SystemId};
use surfman::Context as SurfmanContext;
use surfman::Device as SurfmanDevice;
use surfman::Error as SurfmanError;
use surfman::SurfaceTexture;
use webxr_api::Error;
pub enum GraphicsProvider {}
pub trait GraphicsProviderMethods<G: Graphics> {
fn enable_graphics_extensions(exts: &mut ExtensionSet);
fn pick_format(formats: &[u32]) -> u32;
fn create_session(
device: &SurfmanDevice,
instance: &Instance,
system: SystemId,
) -> Result<(Session<G>, FrameWaiter, FrameStream<G>), Error>;
fn surface_texture_from_swapchain_texture(
image: <G as Graphics>::SwapchainImage,
device: &mut SurfmanDevice,
context: &mut SurfmanContext,
size: &Size2D<i32, UnknownUnit>,
) -> Result<SurfaceTexture, SurfmanError>;
}
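// The D3D11 implementation that follows wires this trait up via winapi/wio so that
// OpenXR swapchain textures can be wrapped as surfman surface textures.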


@ -0,0 +1,139 @@
use std::{mem, ptr};
use euclid::{Size2D, UnknownUnit};
use log::warn;
use openxr::d3d::{Requirements, SessionCreateInfoD3D11, D3D11};
use openxr::{
ExtensionSet, FormFactor, FrameStream, FrameWaiter, Graphics, Instance, Session, SystemId,
};
use surfman::Adapter as SurfmanAdapter;
use surfman::Context as SurfmanContext;
use surfman::Device as SurfmanDevice;
use surfman::Error as SurfmanError;
use surfman::SurfaceTexture;
use webxr_api::Error;
use winapi::shared::winerror::{DXGI_ERROR_NOT_FOUND, S_OK};
use winapi::shared::{dxgi, dxgiformat};
use winapi::um::d3d11::ID3D11Texture2D;
use winapi::Interface;
use wio::com::ComPtr;
use crate::openxr::graphics::{GraphicsProvider, GraphicsProviderMethods};
use crate::openxr::{create_instance, AppInfo};
pub type Backend = D3D11;
impl GraphicsProviderMethods<D3D11> for GraphicsProvider {
fn enable_graphics_extensions(exts: &mut ExtensionSet) {
exts.khr_d3d11_enable = true;
}
fn pick_format(formats: &[u32]) -> u32 {
// TODO: extract the format from surfman's device and pick a matching
// valid format based on that. For now, assume that eglChooseConfig will
// gravitate to B8G8R8A8.
warn!("Available formats: {:?}", formats);
for format in formats {
match *format {
dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM_SRGB => return *format,
dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM => return *format,
//dxgiformat::DXGI_FORMAT_R8G8B8A8_UNORM => return *format,
f => {
warn!("Backend requested unsupported format {:?}", f);
}
}
}
panic!("No formats supported amongst {:?}", formats);
}
fn create_session(
device: &SurfmanDevice,
instance: &Instance,
system: SystemId,
) -> Result<(Session<D3D11>, FrameWaiter, FrameStream<D3D11>), Error> {
// Get the current surfman device and extract its D3D device. This will ensure
// that the OpenXR runtime's texture will be shareable with surfman's surfaces.
let native_device = device.native_device();
let d3d_device = native_device.d3d11_device;
// FIXME: we should be using these graphics requirements to drive the actual
// d3d device creation, rather than assuming the device that surfman
// already created is appropriate. OpenXR returns a validation error
// unless we call this method, so we call it and ignore the results
// in the short term.
let _requirements = D3D11::requirements(&instance, system)
.map_err(|e| Error::BackendSpecific(format!("D3D11::requirements {:?}", e)))?;
unsafe {
instance
.create_session::<D3D11>(
system,
&SessionCreateInfoD3D11 {
device: d3d_device as *mut _,
},
)
.map_err(|e| Error::BackendSpecific(format!("Instance::create_session {:?}", e)))
}
}
fn surface_texture_from_swapchain_texture(
image: <D3D11 as Graphics>::SwapchainImage,
device: &mut SurfmanDevice,
context: &mut SurfmanContext,
size: &Size2D<i32, UnknownUnit>,
) -> Result<SurfaceTexture, SurfmanError> {
unsafe {
let image = ComPtr::from_raw(image as *mut ID3D11Texture2D);
image.AddRef();
device.create_surface_texture_from_texture(context, size, image)
}
}
}
fn get_matching_adapter(
requirements: &Requirements,
) -> Result<ComPtr<dxgi::IDXGIAdapter1>, String> {
unsafe {
let mut factory_ptr: *mut dxgi::IDXGIFactory1 = ptr::null_mut();
let result = dxgi::CreateDXGIFactory1(
&dxgi::IDXGIFactory1::uuidof(),
&mut factory_ptr as *mut _ as *mut _,
);
assert_eq!(result, S_OK);
let factory = ComPtr::from_raw(factory_ptr);
let index = 0;
loop {
let mut adapter_ptr = ptr::null_mut();
let result = factory.EnumAdapters1(index, &mut adapter_ptr);
if result == DXGI_ERROR_NOT_FOUND {
return Err("No matching adapter".to_owned());
}
assert_eq!(result, S_OK);
let adapter = ComPtr::from_raw(adapter_ptr);
let mut adapter_desc = mem::zeroed();
let result = adapter.GetDesc1(&mut adapter_desc);
assert_eq!(result, S_OK);
let adapter_luid = &adapter_desc.AdapterLuid;
if adapter_luid.LowPart == requirements.adapter_luid.LowPart
&& adapter_luid.HighPart == requirements.adapter_luid.HighPart
{
return Ok(adapter);
}
}
}
}
#[allow(unused)]
pub fn create_surfman_adapter() -> Option<SurfmanAdapter> {
let instance = create_instance(false, false, false, &AppInfo::default()).ok()?;
let system = instance
.instance
.system(FormFactor::HEAD_MOUNTED_DISPLAY)
.ok()?;
let requirements = D3D11::requirements(&instance.instance, system).ok()?;
let adapter = get_matching_adapter(&requirements).ok()?;
Some(SurfmanAdapter::from_dxgi_adapter(adapter.up()))
}
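// Illustrative only (not part of the upstream module): a minimal test sketch of the
// format-selection fallback above. It relies only on items already imported in this file and
// uses fully qualified syntax so the trait implementation is unambiguous.
#[cfg(test)]
mod format_tests {
    use super::*;

    #[test]
    fn prefers_b8g8r8a8_formats() {
        // An unsupported format followed by a supported one: the supported one is returned.
        let formats = [
            dxgiformat::DXGI_FORMAT_R10G10B10A2_UNORM,
            dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM,
        ];
        assert_eq!(
            <GraphicsProvider as GraphicsProviderMethods<D3D11>>::pick_format(&formats),
            dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM
        );
    }
}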

View file

@ -0,0 +1,743 @@
use std::ffi::c_void;
use std::mem::MaybeUninit;
use euclid::RigidTransform3D;
use log::debug;
use openxr::sys::{
HandJointLocationsEXT, HandJointsLocateInfoEXT, HandTrackingAimStateFB,
FB_HAND_TRACKING_AIM_EXTENSION_NAME,
};
use openxr::{
self, Action, ActionSet, Binding, FrameState, Graphics, Hand as HandEnum, HandJoint,
HandJointLocation, HandTracker, HandTrackingAimFlagsFB, Instance, Path, Posef, Session, Space,
SpaceLocationFlags, HAND_JOINT_COUNT,
};
use webxr_api::Finger;
use webxr_api::Hand;
use webxr_api::Handedness;
use webxr_api::Input;
use webxr_api::InputFrame;
use webxr_api::InputId;
use webxr_api::InputSource;
use webxr_api::JointFrame;
use webxr_api::Native;
use webxr_api::SelectEvent;
use webxr_api::TargetRayMode;
use webxr_api::Viewer;
use super::interaction_profiles::InteractionProfile;
use super::IDENTITY_POSE;
use crate::ext_string;
use crate::openxr::interaction_profiles::INTERACTION_PROFILES;
/// Number of frames to wait with the menu gesture before
/// opening the menu.
const MENU_GESTURE_SUSTAIN_THRESHOLD: u8 = 60;
/// Helper macro for binding action paths in an interaction profile entry
macro_rules! bind_inputs {
($actions:expr, $paths:expr, $hand:expr, $instance:expr, $ret:expr) => {
$actions.iter().enumerate().for_each(|(i, action)| {
let action_path = $paths[i];
if action_path != "" {
let path = $instance
.string_to_path(&format!("/user/hand/{}/input/{}", $hand, action_path))
.expect(&format!(
"Failed to create path for /user/hand/{}/input/{}",
$hand, action_path
));
let binding = Binding::new(action, path);
$ret.push(binding);
}
});
};
}
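// For a hand of "left" and a path entry of "trigger/value", the macro above pushes a `Binding`
// for `/user/hand/left/input/trigger/value`; empty path entries are skipped.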
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ClickState {
Clicking,
Done,
}
/// All the information on a single input frame
pub struct Frame {
pub frame: InputFrame,
pub select: Option<SelectEvent>,
pub squeeze: Option<SelectEvent>,
pub menu_selected: bool,
}
impl ClickState {
fn update_from_action<G: Graphics>(
&mut self,
action: &Action<bool>,
session: &Session<G>,
menu_selected: bool,
) -> (/* is_active */ bool, Option<SelectEvent>) {
let click = action.state(session, Path::NULL).unwrap();
let select_event =
self.update_from_value(click.current_state, click.is_active, menu_selected);
(click.is_active, select_event)
}
fn update_from_value(
&mut self,
current_state: bool,
is_active: bool,
menu_selected: bool,
) -> Option<SelectEvent> {
if is_active {
match (current_state, *self) {
(_, ClickState::Clicking) if menu_selected => {
*self = ClickState::Done;
// Cancel the select, we're showing a menu
Some(SelectEvent::End)
}
(true, ClickState::Done) => {
*self = ClickState::Clicking;
Some(SelectEvent::Start)
}
(false, ClickState::Clicking) => {
*self = ClickState::Done;
Some(SelectEvent::Select)
}
_ => None,
}
} else if *self == ClickState::Clicking {
*self = ClickState::Done;
// Cancel the select, we lost tracking
Some(SelectEvent::End)
} else {
None
}
}
}
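// Illustrative only (not part of the upstream module): a small sketch of the `ClickState`
// machine above, walking through a press-and-release cycle and a loss of tracking.
#[cfg(test)]
mod click_state_tests {
    use super::*;

    #[test]
    fn press_release_and_tracking_loss() {
        let mut state = ClickState::Done;
        // Button pressed while the action is active: a select gesture starts.
        assert!(matches!(
            state.update_from_value(true, true, false),
            Some(SelectEvent::Start)
        ));
        // Button released: the select gesture completes.
        assert!(matches!(
            state.update_from_value(false, true, false),
            Some(SelectEvent::Select)
        ));
        // Losing tracking mid-click cancels the gesture instead of selecting.
        state = ClickState::Clicking;
        assert!(matches!(
            state.update_from_value(false, false, false),
            Some(SelectEvent::End)
        ));
    }
}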
pub struct OpenXRInput {
id: InputId,
action_aim_pose: Action<Posef>,
action_aim_space: Space,
action_grip_pose: Action<Posef>,
action_grip_space: Space,
action_click: Action<bool>,
action_squeeze: Action<bool>,
handedness: Handedness,
click_state: ClickState,
squeeze_state: ClickState,
menu_gesture_sustain: u8,
#[allow(unused)]
hand_tracker: Option<HandTracker>,
action_buttons_common: Vec<Action<f32>>,
action_buttons_left: Vec<Action<f32>>,
action_buttons_right: Vec<Action<f32>>,
action_axes_common: Vec<Action<f32>>,
use_alternate_input_source: bool,
}
fn hand_str(h: Handedness) -> &'static str {
match h {
Handedness::Right => "right",
Handedness::Left => "left",
_ => panic!("We don't support unknown handedness in openxr"),
}
}
impl OpenXRInput {
pub fn new<G: Graphics>(
id: InputId,
handedness: Handedness,
action_set: &ActionSet,
session: &Session<G>,
needs_hands: bool,
supported_interaction_profiles: Vec<&'static str>,
) -> Self {
let hand = hand_str(handedness);
let action_aim_pose: Action<Posef> = action_set
.create_action(
&format!("{}_hand_aim", hand),
&format!("{} hand aim", hand),
&[],
)
.unwrap();
let action_aim_space = action_aim_pose
.create_space(session.clone(), Path::NULL, IDENTITY_POSE)
.unwrap();
let action_grip_pose: Action<Posef> = action_set
.create_action(
&format!("{}_hand_grip", hand),
&format!("{} hand grip", hand),
&[],
)
.unwrap();
let action_grip_space = action_grip_pose
.create_space(session.clone(), Path::NULL, IDENTITY_POSE)
.unwrap();
let action_click: Action<bool> = action_set
.create_action(
&format!("{}_hand_click", hand),
&format!("{} hand click", hand),
&[],
)
.unwrap();
let action_squeeze: Action<bool> = action_set
.create_action(
&format!("{}_hand_squeeze", hand),
&format!("{} hand squeeze", hand),
&[],
)
.unwrap();
let hand_tracker = if needs_hands {
let hand = match handedness {
Handedness::Left => HandEnum::LEFT,
Handedness::Right => HandEnum::RIGHT,
_ => panic!("We don't support unknown handedness in openxr"),
};
session.create_hand_tracker(hand).ok()
} else {
None
};
let action_buttons_common: Vec<Action<f32>> = {
let button1: Action<f32> = action_set
.create_action(
&format!("{}_trigger", hand),
&format!("{}_trigger", hand),
&[],
)
.unwrap();
let button2: Action<f32> = action_set
.create_action(&format!("{}_grip", hand), &format!("{}_grip", hand), &[])
.unwrap();
let button3: Action<f32> = action_set
.create_action(
&format!("{}_touchpad_click", hand),
&format!("{}_touchpad_click", hand),
&[],
)
.unwrap();
let button4: Action<f32> = action_set
.create_action(
&format!("{}_thumbstick_click", hand),
&format!("{}_thumbstick_click", hand),
&[],
)
.unwrap();
vec![button1, button2, button3, button4]
};
let action_buttons_left = {
let button1: Action<f32> = action_set
.create_action(&format!("{}_x", hand), &format!("{}_x", hand), &[])
.unwrap();
let button2: Action<f32> = action_set
.create_action(&format!("{}_y", hand), &format!("{}_y", hand), &[])
.unwrap();
vec![button1, button2]
};
let action_buttons_right = {
let button1: Action<f32> = action_set
.create_action(&format!("{}_a", hand), &format!("{}_a", hand), &[])
.unwrap();
let button2: Action<f32> = action_set
.create_action(&format!("{}_b", hand), &format!("{}_b", hand), &[])
.unwrap();
vec![button1, button2]
};
let action_axes_common: Vec<Action<f32>> = {
let axis1: Action<f32> = action_set
.create_action(
&format!("{}_touchpad_x", hand),
&format!("{}_touchpad_x", hand),
&[],
)
.unwrap();
let axis2: Action<f32> = action_set
.create_action(
&format!("{}_touchpad_y", hand),
&format!("{}_touchpad_y", hand),
&[],
)
.unwrap();
let axis3: Action<f32> = action_set
.create_action(
&format!("{}_thumbstick_x", hand),
&format!("{}_thumbstick_x", hand),
&[],
)
.unwrap();
let axis4: Action<f32> = action_set
.create_action(
&format!("{}_thumbstick_y", hand),
&format!("{}_thumbstick_y", hand),
&[],
)
.unwrap();
vec![axis1, axis2, axis3, axis4]
};
let use_alternate_input_source = supported_interaction_profiles
.contains(&ext_string!(FB_HAND_TRACKING_AIM_EXTENSION_NAME));
Self {
id,
action_aim_pose,
action_aim_space,
action_grip_pose,
action_grip_space,
action_click,
action_squeeze,
handedness,
click_state: ClickState::Done,
squeeze_state: ClickState::Done,
menu_gesture_sustain: 0,
hand_tracker,
action_buttons_common,
action_axes_common,
action_buttons_left,
action_buttons_right,
use_alternate_input_source,
}
}
pub fn setup_inputs<G: Graphics>(
instance: &Instance,
session: &Session<G>,
needs_hands: bool,
supported_interaction_profiles: Vec<&'static str>,
) -> (ActionSet, Self, Self) {
let action_set = instance.create_action_set("hands", "Hands", 0).unwrap();
let right_hand = OpenXRInput::new(
InputId(0),
Handedness::Right,
&action_set,
&session,
needs_hands,
supported_interaction_profiles.clone(),
);
let left_hand = OpenXRInput::new(
InputId(1),
Handedness::Left,
&action_set,
&session,
needs_hands,
supported_interaction_profiles.clone(),
);
for profile in INTERACTION_PROFILES {
if let Some(extension_name) = profile.required_extension {
if !supported_interaction_profiles.contains(&ext_string!(extension_name)) {
continue;
}
}
if profile.path.is_empty() {
continue;
}
let select = profile.standard_buttons[0];
let squeeze = Option::from(profile.standard_buttons[1]).filter(|&s| !s.is_empty());
let mut bindings = right_hand.get_bindings(instance, select, squeeze, &profile);
bindings.extend(
left_hand
.get_bindings(instance, select, squeeze, &profile)
.into_iter(),
);
let path_controller = instance
.string_to_path(profile.path)
.expect(format!("Invalid interaction profile path: {}", profile.path).as_str());
if let Err(_) =
instance.suggest_interaction_profile_bindings(path_controller, &bindings)
{
debug!(
"Interaction profile path not available for this runtime: {:?}",
profile.path
);
}
}
session.attach_action_sets(&[&action_set]).unwrap();
(action_set, right_hand, left_hand)
}
fn get_bindings(
&self,
instance: &Instance,
select_name: &str,
squeeze_name: Option<&str>,
interaction_profile: &InteractionProfile,
) -> Vec<Binding> {
let hand = hand_str(self.handedness);
let path_aim_pose = instance
.string_to_path(&format!("/user/hand/{}/input/aim/pose", hand))
.expect(&format!(
"Failed to create path for /user/hand/{}/input/aim/pose",
hand
));
let binding_aim_pose = Binding::new(&self.action_aim_pose, path_aim_pose);
let path_grip_pose = instance
.string_to_path(&format!("/user/hand/{}/input/grip/pose", hand))
.expect(&format!(
"Failed to create path for /user/hand/{}/input/grip/pose",
hand
));
let binding_grip_pose = Binding::new(&self.action_grip_pose, path_grip_pose);
let path_click = instance
.string_to_path(&format!("/user/hand/{}/input/{}", hand, select_name))
.expect(&format!(
"Failed to create path for /user/hand/{}/input/{}",
hand, select_name
));
let binding_click = Binding::new(&self.action_click, path_click);
let mut ret = vec![binding_aim_pose, binding_grip_pose, binding_click];
if let Some(squeeze_name) = squeeze_name {
let path_squeeze = instance
.string_to_path(&format!("/user/hand/{}/input/{}", hand, squeeze_name))
.expect(&format!(
"Failed to create path for /user/hand/{}/input/{}",
hand, squeeze_name
));
let binding_squeeze = Binding::new(&self.action_squeeze, path_squeeze);
ret.push(binding_squeeze);
}
bind_inputs!(
self.action_buttons_common,
interaction_profile.standard_buttons,
hand,
instance,
ret
);
if !interaction_profile.left_buttons.is_empty() && hand == "left" {
bind_inputs!(
self.action_buttons_left,
interaction_profile.left_buttons,
hand,
instance,
ret
);
} else if !interaction_profile.right_buttons.is_empty() && hand == "right" {
bind_inputs!(
self.action_buttons_right,
interaction_profile.right_buttons,
hand,
instance,
ret
);
}
bind_inputs!(
self.action_axes_common,
interaction_profile.standard_axes,
hand,
instance,
ret
);
ret
}
pub fn frame<G: Graphics>(
&mut self,
session: &Session<G>,
frame_state: &FrameState,
base_space: &Space,
viewer: &RigidTransform3D<f32, Viewer, Native>,
) -> Frame {
use euclid::Vector3D;
let mut target_ray_origin = pose_for(&self.action_aim_space, frame_state, base_space);
let grip_origin = pose_for(&self.action_grip_space, frame_state, base_space);
let mut menu_selected = false;
// Check if the palm is facing up. This is our "menu" gesture.
if let Some(grip_origin) = grip_origin {
            // The X axis of the grip is perpendicular to the palm, but it points in the
            // opposite direction for each hand.
            //
            // We obtain a unit vector pointing out of the palm.
let x_dir = if let Handedness::Left = self.handedness {
1.0
} else {
-1.0
};
// Rotate it by the grip to obtain the desired vector
let grip_x = grip_origin
.rotation
.transform_vector3d(Vector3D::new(x_dir, 0.0, 0.0));
let gaze = viewer
.rotation
.transform_vector3d(Vector3D::new(0., 0., 1.));
            // If the angle between the two unit vectors is close to 0, their dot product
            // (the cosine of that angle) is close to 1; 0.95 corresponds to roughly 18 degrees.
            // First, check whether the user's gaze is parallel to the palm.
if gaze.dot(grip_x) > 0.95 {
let input_relative = (viewer.translation - grip_origin.translation).normalize();
// if so, check if the user is actually looking at the palm
if gaze.dot(input_relative) > 0.95 {
self.menu_gesture_sustain += 1;
if self.menu_gesture_sustain > MENU_GESTURE_SUSTAIN_THRESHOLD {
menu_selected = true;
self.menu_gesture_sustain = 0;
}
} else {
self.menu_gesture_sustain = 0
}
} else {
self.menu_gesture_sustain = 0;
}
} else {
self.menu_gesture_sustain = 0;
}
let hand = hand_str(self.handedness);
let click = self.action_click.state(session, Path::NULL).unwrap();
let squeeze = self.action_squeeze.state(session, Path::NULL).unwrap();
let (button_values, buttons_changed) = {
let mut changed = false;
let mut values = Vec::<f32>::new();
let mut sync_buttons = |actions: &Vec<Action<f32>>| {
let buttons = actions
.iter()
.map(|action| {
let state = action.state(session, Path::NULL).unwrap();
changed = changed || state.changed_since_last_sync;
state.current_state
})
.collect::<Vec<f32>>();
values.extend_from_slice(&buttons);
};
sync_buttons(&self.action_buttons_common);
if hand == "left" {
sync_buttons(&self.action_buttons_left);
} else if hand == "right" {
sync_buttons(&self.action_buttons_right);
}
(values, changed)
};
let (axis_values, axes_changed) = {
let mut changed = false;
let values = self
.action_axes_common
.iter()
.enumerate()
.map(|(i, action)| {
let state = action.state(session, Path::NULL).unwrap();
changed = changed || state.changed_since_last_sync;
// Invert input from y axes
state.current_state * if i % 2 == 1 { -1.0 } else { 1.0 }
})
.collect::<Vec<f32>>();
(values, changed)
};
let input_changed = buttons_changed || axes_changed;
let (click_is_active, mut click_event) = if !self.use_alternate_input_source {
self.click_state
.update_from_action(&self.action_click, session, menu_selected)
} else {
(true, None)
};
let (squeeze_is_active, squeeze_event) =
self.squeeze_state
.update_from_action(&self.action_squeeze, session, menu_selected);
let mut aim_state: Option<HandTrackingAimStateFB> = None;
let hand = self.hand_tracker.as_ref().and_then(|tracker| {
locate_hand(
base_space,
tracker,
frame_state,
self.use_alternate_input_source,
session,
&mut aim_state,
)
});
let mut pressed = click_is_active && click.current_state;
let squeezed = squeeze_is_active && squeeze.current_state;
if let Some(state) = aim_state {
target_ray_origin.replace(super::transform(&state.aim_pose));
let index_pinching = state
.status
.intersects(HandTrackingAimFlagsFB::INDEX_PINCHING);
click_event = self
.click_state
.update_from_value(index_pinching, true, menu_selected);
pressed = index_pinching;
}
let input_frame = InputFrame {
target_ray_origin,
id: self.id,
pressed,
squeezed,
grip_origin,
hand,
button_values,
axis_values,
input_changed,
};
Frame {
frame: input_frame,
select: click_event,
squeeze: squeeze_event,
menu_selected,
}
}
pub fn input_source(&self) -> InputSource {
let hand_support = if self.hand_tracker.is_some() {
            // OpenXR runtimes must support either all hand joints or none.
Some(Hand::<()>::default().map(|_, _| Some(())))
} else {
None
};
InputSource {
handedness: self.handedness,
id: self.id,
target_ray_mode: TargetRayMode::TrackedPointer,
supports_grip: true,
profiles: vec![],
hand_support,
}
}
}
fn pose_for(
action_space: &Space,
frame_state: &FrameState,
base_space: &Space,
) -> Option<RigidTransform3D<f32, Input, Native>> {
let location = action_space
.locate(base_space, frame_state.predicted_display_time)
.unwrap();
let pose_valid = location
.location_flags
.intersects(SpaceLocationFlags::POSITION_VALID | SpaceLocationFlags::ORIENTATION_VALID);
if pose_valid {
Some(super::transform(&location.pose))
} else {
None
}
}
fn locate_hand<G: Graphics>(
base_space: &Space,
tracker: &HandTracker,
frame_state: &FrameState,
use_alternate_input_source: bool,
session: &Session<G>,
aim_state: &mut Option<HandTrackingAimStateFB>,
) -> Option<Box<Hand<JointFrame>>> {
let mut state = HandTrackingAimStateFB::out(std::ptr::null_mut());
let locations = {
if !use_alternate_input_source {
base_space.locate_hand_joints(tracker, frame_state.predicted_display_time)
} else {
let locate_info = HandJointsLocateInfoEXT {
ty: HandJointsLocateInfoEXT::TYPE,
next: std::ptr::null(),
base_space: base_space.as_raw(),
time: frame_state.predicted_display_time,
};
let mut locations = MaybeUninit::<[HandJointLocation; HAND_JOINT_COUNT]>::uninit();
let mut location_info = HandJointLocationsEXT {
ty: HandJointLocationsEXT::TYPE,
next: &mut state as *mut _ as *mut c_void,
is_active: false.into(),
joint_count: HAND_JOINT_COUNT as u32,
joint_locations: locations.as_mut_ptr() as _,
};
// Check if hand tracking is supported by the session instance
let raw_hand_tracker = session.instance().exts().ext_hand_tracking.as_ref()?;
unsafe {
Ok(
match (raw_hand_tracker.locate_hand_joints)(
tracker.as_raw(),
&locate_info,
&mut location_info,
) {
openxr::sys::Result::SUCCESS if location_info.is_active.into() => {
aim_state.replace(state.assume_init());
Some(locations.assume_init())
}
_ => None,
},
)
}
}
};
let locations = if let Ok(Some(ref locations)) = locations {
Hand {
wrist: Some(&locations[HandJoint::WRIST]),
thumb_metacarpal: Some(&locations[HandJoint::THUMB_METACARPAL]),
thumb_phalanx_proximal: Some(&locations[HandJoint::THUMB_PROXIMAL]),
thumb_phalanx_distal: Some(&locations[HandJoint::THUMB_DISTAL]),
thumb_phalanx_tip: Some(&locations[HandJoint::THUMB_TIP]),
index: Finger {
metacarpal: Some(&locations[HandJoint::INDEX_METACARPAL]),
phalanx_proximal: Some(&locations[HandJoint::INDEX_PROXIMAL]),
phalanx_intermediate: Some(&locations[HandJoint::INDEX_INTERMEDIATE]),
phalanx_distal: Some(&locations[HandJoint::INDEX_DISTAL]),
phalanx_tip: Some(&locations[HandJoint::INDEX_TIP]),
},
middle: Finger {
metacarpal: Some(&locations[HandJoint::MIDDLE_METACARPAL]),
phalanx_proximal: Some(&locations[HandJoint::MIDDLE_PROXIMAL]),
phalanx_intermediate: Some(&locations[HandJoint::MIDDLE_INTERMEDIATE]),
phalanx_distal: Some(&locations[HandJoint::MIDDLE_DISTAL]),
phalanx_tip: Some(&locations[HandJoint::MIDDLE_TIP]),
},
ring: Finger {
metacarpal: Some(&locations[HandJoint::RING_METACARPAL]),
phalanx_proximal: Some(&locations[HandJoint::RING_PROXIMAL]),
phalanx_intermediate: Some(&locations[HandJoint::RING_INTERMEDIATE]),
phalanx_distal: Some(&locations[HandJoint::RING_DISTAL]),
phalanx_tip: Some(&locations[HandJoint::RING_TIP]),
},
little: Finger {
metacarpal: Some(&locations[HandJoint::LITTLE_METACARPAL]),
phalanx_proximal: Some(&locations[HandJoint::LITTLE_PROXIMAL]),
phalanx_intermediate: Some(&locations[HandJoint::LITTLE_INTERMEDIATE]),
phalanx_distal: Some(&locations[HandJoint::LITTLE_DISTAL]),
phalanx_tip: Some(&locations[HandJoint::LITTLE_TIP]),
},
}
} else {
return None;
};
Some(Box::new(locations.map(|loc, _| {
loc.and_then(|location| {
let pose_valid = location.location_flags.intersects(
SpaceLocationFlags::POSITION_VALID | SpaceLocationFlags::ORIENTATION_VALID,
);
if pose_valid {
Some(JointFrame {
pose: super::transform(&location.pose),
radius: location.radius,
})
} else {
None
}
})
})))
}

View file

@ -0,0 +1,444 @@
use openxr::{
sys::{
BD_CONTROLLER_INTERACTION_EXTENSION_NAME, EXT_HAND_INTERACTION_EXTENSION_NAME,
EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME,
EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME, FB_HAND_TRACKING_AIM_EXTENSION_NAME,
FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME,
HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME,
HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME,
META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME, ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME,
},
ExtensionSet,
};
#[macro_export]
macro_rules! ext_string {
($ext_name:expr) => {
std::str::from_utf8($ext_name).unwrap()
};
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum InteractionProfileType {
KhrSimpleController,
BytedancePicoNeo3Controller,
BytedancePico4Controller,
BytedancePicoG3Controller,
GoogleDaydreamController,
HpMixedRealityController,
HtcViveController,
HtcViveCosmosController,
HtcViveFocus3Controller,
MagicLeap2Controller,
MicrosoftMixedRealityMotionController,
OculusGoController,
OculusTouchController,
FacebookTouchControllerPro,
MetaTouchPlusController,
MetaTouchControllerRiftCv1,
MetaTouchControllerQuest1RiftS,
MetaTouchControllerQuest2,
SamsungOdysseyController,
ValveIndexController,
ExtHandInteraction,
FbHandTrackingAim,
}
#[derive(Clone, Copy, Debug)]
pub struct InteractionProfile<'a> {
pub profile_type: InteractionProfileType,
/// The interaction profile path
pub path: &'static str,
/// The OpenXR extension, if any, required to use this profile
pub required_extension: Option<&'a [u8]>,
/// Trigger, Grip, Touchpad, Thumbstick
pub standard_buttons: &'a [&'a str],
/// Touchpad X, Touchpad Y, Thumbstick X, Thumbstick Y
pub standard_axes: &'a [&'a str],
/// Any additional buttons on the left controller
pub left_buttons: &'a [&'a str],
/// Any additional buttons on the right controller
pub right_buttons: &'a [&'a str],
/// The corresponding WebXR Input Profile names
pub profiles: &'a [&'a str],
}
pub static KHR_SIMPLE_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::KhrSimpleController,
path: "/interaction_profiles/khr/simple_controller",
required_extension: None,
standard_buttons: &["select/click", "", "", ""],
standard_axes: &["", "", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["generic-trigger"],
};
pub static BYTEDANCE_PICO_NEO3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::BytedancePicoNeo3Controller,
path: "/interaction_profiles/bytedance/pico_neo3_controller",
required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["pico-neo3", "generic-trigger-squeeze-thumbstick"],
};
pub static BYTEDANCE_PICO_4_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::BytedancePico4Controller,
path: "/interaction_profiles/bytedance/pico4_controller",
required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["pico-4", "generic-trigger-squeeze-thumbstick"],
};
pub static BYTEDANCE_PICO_G3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::BytedancePicoG3Controller,
path: "/interaction_profiles/bytedance/pico_g3_controller",
required_extension: Some(BD_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "", "", "thumbstick/click"],
// Note: X/Y components not listed in the OpenXR spec currently due to vendor error.
// See <https://github.com/KhronosGroup/OpenXR-Docs/issues/158>
// It also uses the thumbstick path despite clearly being a touchpad, so
// move those values into the touchpad axes slots
standard_axes: &["thumbstick/x", "thumbstick/y", "", ""],
left_buttons: &[],
right_buttons: &[],
// Note: There is no corresponding WebXR Input profile for the Pico G3,
// but the controller seems identical to the G2, so use that instead.
profiles: &["pico-g2", "generic-trigger-touchpad"],
};
pub static GOOGLE_DAYDREAM_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::GoogleDaydreamController,
path: "/interaction_profiles/google/daydream_controller",
required_extension: None,
standard_buttons: &["select/click", "", "trackpad/click", ""],
standard_axes: &["trackpad/x", "trackpad/y", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["google-daydream", "generic-touchpad"],
};
pub static HP_MIXED_REALITY_MOTION_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::HpMixedRealityController,
path: "/interaction_profiles/hp/mixed_reality_controller",
required_extension: Some(EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"hp-mixed-reality",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static HTC_VIVE_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::HtcViveController,
path: "/interaction_profiles/htc/vive_controller",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/click", "trackpad/click", ""],
standard_axes: &["trackpad/x", "trackpad/y", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["htc-vive", "generic-trigger-squeeze-touchpad"],
};
pub static HTC_VIVE_COSMOS_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::HtcViveCosmosController,
path: "/interaction_profiles/htc/vive_cosmos_controller",
required_extension: Some(HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/click", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["htc-vive-cosmos", "generic-trigger-squeeze-thumbstick"],
};
pub static HTC_VIVE_FOCUS3_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::HtcViveFocus3Controller,
path: "/interaction_profiles/htc/vive_focus3_controller",
required_extension: Some(HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["htc-vive-focus-3", "generic-trigger-squeeze-thumbstick"],
};
pub static MAGIC_LEAP_2_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::MagicLeap2Controller,
path: "/interaction_profiles/ml/ml2_controller",
required_extension: Some(ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME),
standard_buttons: &["trigger/value", "", "trackpad/click", ""],
standard_axes: &["trackpad/x", "trackpad/y", "", ""],
left_buttons: &[],
right_buttons: &[],
// Note: There is no corresponding WebXR Input profile for the Magic Leap 2,
// but the controller seems mostly identical to the 1, so use that instead.
profiles: &["magicleap-one", "generic-trigger-squeeze-touchpad"],
};
pub static MICROSOFT_MIXED_REALITY_MOTION_CONTROLLER_PROFILE: InteractionProfile =
InteractionProfile {
profile_type: InteractionProfileType::MicrosoftMixedRealityMotionController,
path: "/interaction_profiles/microsoft/motion_controller",
required_extension: None,
standard_buttons: &[
"trigger/value",
"squeeze/click",
"trackpad/click",
"thumbstick/click",
],
standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"],
left_buttons: &[],
right_buttons: &[],
profiles: &[
"microsoft-mixed-reality",
"generic-trigger-squeeze-touchpad-thumbstick",
],
};
pub static OCULUS_GO_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::OculusGoController,
path: "/interaction_profiles/oculus/go_controller",
required_extension: None,
standard_buttons: &["trigger/click", "", "trackpad/click", ""],
standard_axes: &["trackpad/x", "trackpad/y", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["oculus-go", "generic-trigger-touchpad"],
};
pub static OCULUS_TOUCH_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::OculusTouchController,
path: "/interaction_profiles/oculus/touch_controller",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"oculus-touch-v3",
"oculus-touch-v2",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static FACEBOOK_TOUCH_CONTROLLER_PRO_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::FacebookTouchControllerPro,
path: "/interaction_profiles/facebook/touch_controller_pro",
required_extension: Some(FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"meta-quest-touch-pro",
"oculus-touch-v2",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static META_TOUCH_CONTROLLER_PLUS_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::MetaTouchPlusController,
path: "/interaction_profiles/meta/touch_controller_plus",
required_extension: Some(META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME),
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"meta-quest-touch-plus",
"oculus-touch-v3",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static META_TOUCH_CONTROLLER_RIFT_CV1_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::MetaTouchControllerRiftCv1,
path: "/interaction_profiles/meta/touch_controller_rift_cv1",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["oculus-touch", "generic-trigger-squeeze-thumbstick"],
};
pub static META_TOUCH_CONTROLLER_QUEST_1_RIFT_S_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::MetaTouchControllerQuest1RiftS,
path: "/interaction_profiles/meta/touch_controller_quest_1_rift_s",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"oculus-touch-v2",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static META_TOUCH_CONTROLLER_QUEST_2_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::MetaTouchControllerQuest2,
path: "/interaction_profiles/meta/touch_controller_quest_2",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["", "", "thumbstick/x", "thumbstick/y"],
left_buttons: &["x/click", "y/click"],
right_buttons: &["a/click", "b/click"],
profiles: &[
"oculus-touch-v3",
"oculus-touch-v2",
"oculus-touch",
"generic-trigger-squeeze-thumbstick",
],
};
pub static SAMSUNG_ODYSSEY_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::SamsungOdysseyController,
path: "/interaction_profiles/samsung/odyssey_controller",
required_extension: Some(EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME),
standard_buttons: &[
"trigger/value",
"squeeze/click",
"trackpad/click",
"thumbstick/click",
],
standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"],
left_buttons: &[],
right_buttons: &[],
profiles: &[
"samsung-odyssey",
"microsoft-mixed-reality",
"generic-trigger-squeeze-touchpad-thumbstick",
],
};
pub static VALVE_INDEX_CONTROLLER_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::ValveIndexController,
path: "/interaction_profiles/valve/index_controller",
required_extension: None,
standard_buttons: &["trigger/value", "squeeze/value", "", "thumbstick/click"],
standard_axes: &["trackpad/x", "trackpad/y", "thumbstick/x", "thumbstick/y"],
left_buttons: &["a/click", "b/click"],
right_buttons: &["a/click", "b/click"],
profiles: &["valve-index", "generic-trigger-squeeze-touchpad-thumbstick"],
};
pub static EXT_HAND_INTERACTION_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::ExtHandInteraction,
path: "/interaction_profiles/ext/hand_interaction_ext",
required_extension: Some(EXT_HAND_INTERACTION_EXTENSION_NAME),
standard_buttons: &["pinch_ext/value", "", "", ""],
standard_axes: &["", "", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["generic-hand-select", "generic-hand"],
};
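// Note: the entry below has an empty `path`. Its input comes from the FB hand tracking aim
// state rather than from suggested action bindings, so binding setup skips it.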
pub static FB_HAND_TRACKING_AIM_PROFILE: InteractionProfile = InteractionProfile {
profile_type: InteractionProfileType::FbHandTrackingAim,
path: "",
required_extension: Some(FB_HAND_TRACKING_AIM_EXTENSION_NAME),
standard_buttons: &["", "", "", ""],
standard_axes: &["", "", "", ""],
left_buttons: &[],
right_buttons: &[],
profiles: &["generic-hand-select", "generic-hand"],
};
pub static INTERACTION_PROFILES: [InteractionProfile; 22] = [
KHR_SIMPLE_CONTROLLER_PROFILE,
BYTEDANCE_PICO_NEO3_CONTROLLER_PROFILE,
BYTEDANCE_PICO_4_CONTROLLER_PROFILE,
BYTEDANCE_PICO_G3_CONTROLLER_PROFILE,
GOOGLE_DAYDREAM_CONTROLLER_PROFILE,
HP_MIXED_REALITY_MOTION_CONTROLLER_PROFILE,
HTC_VIVE_CONTROLLER_PROFILE,
HTC_VIVE_COSMOS_CONTROLLER_PROFILE,
HTC_VIVE_FOCUS3_CONTROLLER_PROFILE,
MAGIC_LEAP_2_CONTROLLER_PROFILE,
MICROSOFT_MIXED_REALITY_MOTION_CONTROLLER_PROFILE,
OCULUS_GO_CONTROLLER_PROFILE,
OCULUS_TOUCH_CONTROLLER_PROFILE,
FACEBOOK_TOUCH_CONTROLLER_PRO_PROFILE,
META_TOUCH_CONTROLLER_PLUS_PROFILE,
META_TOUCH_CONTROLLER_RIFT_CV1_PROFILE,
META_TOUCH_CONTROLLER_QUEST_1_RIFT_S_PROFILE,
META_TOUCH_CONTROLLER_QUEST_2_PROFILE,
SAMSUNG_ODYSSEY_CONTROLLER_PROFILE,
VALVE_INDEX_CONTROLLER_PROFILE,
EXT_HAND_INTERACTION_PROFILE,
FB_HAND_TRACKING_AIM_PROFILE,
];
pub fn get_profiles_from_path(path: String) -> &'static [&'static str] {
INTERACTION_PROFILES
.iter()
.find(|profile| profile.path == path)
.map_or(&[], |profile| profile.profiles)
}
pub fn get_supported_interaction_profiles(
supported_extensions: &ExtensionSet,
enabled_extensions: &mut ExtensionSet,
) -> Vec<&'static str> {
let mut extensions = Vec::new();
if supported_extensions.bd_controller_interaction {
extensions.push(ext_string!(BD_CONTROLLER_INTERACTION_EXTENSION_NAME));
enabled_extensions.bd_controller_interaction = true;
}
if supported_extensions.ext_hp_mixed_reality_controller {
extensions.push(ext_string!(EXT_HP_MIXED_REALITY_CONTROLLER_EXTENSION_NAME));
enabled_extensions.ext_hp_mixed_reality_controller = true;
}
if supported_extensions.ext_samsung_odyssey_controller {
extensions.push(ext_string!(EXT_SAMSUNG_ODYSSEY_CONTROLLER_EXTENSION_NAME));
enabled_extensions.ext_samsung_odyssey_controller = true;
}
if supported_extensions.ml_ml2_controller_interaction {
extensions.push(ext_string!(ML_ML2_CONTROLLER_INTERACTION_EXTENSION_NAME));
enabled_extensions.ml_ml2_controller_interaction = true;
}
if supported_extensions.htc_vive_cosmos_controller_interaction {
extensions.push(ext_string!(
HTC_VIVE_COSMOS_CONTROLLER_INTERACTION_EXTENSION_NAME
));
enabled_extensions.htc_vive_cosmos_controller_interaction = true;
}
if supported_extensions.htc_vive_focus3_controller_interaction {
extensions.push(ext_string!(
HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME
));
enabled_extensions.htc_vive_focus3_controller_interaction = true;
}
if supported_extensions.fb_touch_controller_pro {
extensions.push(ext_string!(FB_TOUCH_CONTROLLER_PRO_EXTENSION_NAME));
enabled_extensions.fb_touch_controller_pro = true;
}
if supported_extensions.meta_touch_controller_plus {
extensions.push(ext_string!(META_TOUCH_CONTROLLER_PLUS_EXTENSION_NAME));
enabled_extensions.meta_touch_controller_plus = true;
}
if supported_extensions.ext_hand_interaction {
extensions.push(ext_string!(EXT_HAND_INTERACTION_EXTENSION_NAME));
enabled_extensions.ext_hand_interaction = true;
}
if supported_extensions.fb_hand_tracking_aim {
extensions.push(ext_string!(FB_HAND_TRACKING_AIM_EXTENSION_NAME));
enabled_extensions.fb_hand_tracking_aim = true;
}
extensions
}
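// Illustrative only (not part of the upstream module): a minimal sketch showing how the
// profile table above is queried by interaction profile path.
#[cfg(test)]
mod profile_lookup_tests {
    use super::*;

    #[test]
    fn lookup_by_path() {
        // A known path maps to its WebXR input profile names...
        assert_eq!(
            get_profiles_from_path("/interaction_profiles/khr/simple_controller".to_string()),
            &["generic-trigger"]
        );
        // ...and an unknown path falls back to an empty slice.
        assert!(get_profiles_from_path("/interaction_profiles/unknown".to_string()).is_empty());
    }
}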

File diff suppressed because it is too large

View file

@ -0,0 +1,234 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! An implementation of layer management using surfman
use crate::gl_utils::GlClearer;
use euclid::{Point2D, Rect, Size2D};
use glow::{self as gl, Context as Gl, HasContext, PixelUnpackData};
use std::collections::HashMap;
use std::num::NonZeroU32;
use surfman::chains::{PreserveBuffer, SwapChains, SwapChainsAPI};
use surfman::{Context as SurfmanContext, Device as SurfmanDevice, SurfaceAccess, SurfaceTexture};
use webxr_api::{
ContextId, Error, GLContexts, GLTypes, LayerId, LayerInit, LayerManagerAPI, SubImage,
SubImages, Viewports,
};
#[derive(Copy, Clone, Debug)]
pub enum SurfmanGL {}
impl GLTypes for SurfmanGL {
type Device = SurfmanDevice;
type Context = SurfmanContext;
type Bindings = Gl;
}
pub struct SurfmanLayerManager {
layers: Vec<(ContextId, LayerId)>,
swap_chains: SwapChains<LayerId, SurfmanDevice>,
surface_textures: HashMap<LayerId, SurfaceTexture>,
depth_stencil_textures: HashMap<LayerId, Option<gl::NativeTexture>>,
viewports: Viewports,
clearer: GlClearer,
}
impl SurfmanLayerManager {
pub fn new(
viewports: Viewports,
swap_chains: SwapChains<LayerId, SurfmanDevice>,
) -> SurfmanLayerManager {
let layers = Vec::new();
let surface_textures = HashMap::new();
let depth_stencil_textures = HashMap::new();
let clearer = GlClearer::new(false);
SurfmanLayerManager {
layers,
swap_chains,
surface_textures,
depth_stencil_textures,
viewports,
clearer,
}
}
}
impl LayerManagerAPI<SurfmanGL> for SurfmanLayerManager {
fn create_layer(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
context_id: ContextId,
init: LayerInit,
) -> Result<LayerId, Error> {
let texture_size = init.texture_size(&self.viewports);
let layer_id = LayerId::new();
let access = SurfaceAccess::GPUOnly;
let size = texture_size.to_untyped();
// TODO: Treat depth and stencil separately?
let has_depth_stencil = match init {
LayerInit::WebGLLayer { stencil, depth, .. } => stencil | depth,
LayerInit::ProjectionLayer { stencil, depth, .. } => stencil | depth,
};
if has_depth_stencil {
let gl = contexts
.bindings(device, context_id)
.ok_or(Error::NoMatchingDevice)?;
let depth_stencil_texture = unsafe { gl.create_texture().ok() };
unsafe {
gl.bind_texture(gl::TEXTURE_2D, depth_stencil_texture);
gl.tex_image_2d(
gl::TEXTURE_2D,
0,
gl::DEPTH24_STENCIL8 as _,
size.width,
size.height,
0,
gl::DEPTH_STENCIL,
gl::UNSIGNED_INT_24_8,
PixelUnpackData::Slice(None),
);
}
self.depth_stencil_textures
.insert(layer_id, depth_stencil_texture);
}
let context = contexts
.context(device, context_id)
.ok_or(Error::NoMatchingDevice)?;
self.swap_chains
.create_detached_swap_chain(layer_id, size, device, context, access)
.map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?;
self.layers.push((context_id, layer_id));
Ok(layer_id)
}
fn destroy_layer(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
context_id: ContextId,
layer_id: LayerId,
) {
self.clearer
.destroy_layer(device, contexts, context_id, layer_id);
let context = match contexts.context(device, context_id) {
Some(context) => context,
None => return,
};
self.layers.retain(|&ids| ids != (context_id, layer_id));
let _ = self.swap_chains.destroy(layer_id, device, context);
self.surface_textures.remove(&layer_id);
if let Some(depth_stencil_texture) = self.depth_stencil_textures.remove(&layer_id) {
let gl = contexts.bindings(device, context_id).unwrap();
if let Some(depth_stencil_texture) = depth_stencil_texture {
unsafe {
gl.delete_texture(depth_stencil_texture);
}
}
}
}
fn layers(&self) -> &[(ContextId, LayerId)] {
&self.layers[..]
}
fn begin_frame(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
layers: &[(ContextId, LayerId)],
) -> Result<Vec<SubImages>, Error> {
layers
.iter()
.map(|&(context_id, layer_id)| {
let context = contexts
.context(device, context_id)
.ok_or(Error::NoMatchingDevice)?;
let swap_chain = self
.swap_chains
.get(layer_id)
.ok_or(Error::NoMatchingDevice)?;
let surface_size = Size2D::from_untyped(swap_chain.size());
let surface_texture = swap_chain
.take_surface_texture(device, context)
.map_err(|_| Error::NoMatchingDevice)?;
let color_texture = device.surface_texture_object(&surface_texture);
let color_target = device.surface_gl_texture_target();
let depth_stencil_texture = self
.depth_stencil_textures
.get(&layer_id)
.cloned()
.flatten();
let texture_array_index = None;
let origin = Point2D::new(0, 0);
let sub_image = Some(SubImage {
color_texture,
depth_stencil_texture: depth_stencil_texture.map(|nt| nt.0.get()),
texture_array_index,
viewport: Rect::new(origin, surface_size),
});
let view_sub_images = self
.viewports
.viewports
.iter()
.map(|&viewport| SubImage {
color_texture,
depth_stencil_texture: depth_stencil_texture.map(|texture| texture.0.get()),
texture_array_index,
viewport,
})
.collect();
self.surface_textures.insert(layer_id, surface_texture);
self.clearer.clear(
device,
contexts,
context_id,
layer_id,
NonZeroU32::new(color_texture).map(gl::NativeTexture),
color_target,
depth_stencil_texture,
);
Ok(SubImages {
layer_id,
sub_image,
view_sub_images,
})
})
.collect()
}
fn end_frame(
&mut self,
device: &mut SurfmanDevice,
contexts: &mut dyn GLContexts<SurfmanGL>,
layers: &[(ContextId, LayerId)],
) -> Result<(), Error> {
for &(context_id, layer_id) in layers {
let gl = contexts
.bindings(device, context_id)
.ok_or(Error::NoMatchingDevice)?;
unsafe {
gl.flush();
}
let context = contexts
.context(device, context_id)
.ok_or(Error::NoMatchingDevice)?;
let surface_texture = self
.surface_textures
.remove(&layer_id)
.ok_or(Error::NoMatchingDevice)?;
let swap_chain = self
.swap_chains
.get(layer_id)
.ok_or(Error::NoMatchingDevice)?;
swap_chain
.recycle_surface_texture(device, context, surface_texture)
.map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?;
swap_chain
.swap_buffers(device, context, PreserveBuffer::No)
.map_err(|err| Error::BackendSpecific(format!("{:?}", err)))?;
}
Ok(())
}
}
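// Typical per-frame usage (illustrative only; the `device`, `contexts`, and `layers` values
// come from the session that owns this manager):
//
//     let sub_images = manager.begin_frame(device, contexts, layers)?;
//     // ... render into each SubImage's color / depth-stencil textures ...
//     manager.end_frame(device, contexts, layers)?;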

View file

@ -29,6 +29,9 @@ files = [
]
# Directories that are ignored for the non-WPT tidy check.
directories = [
# Ignored until these files are fully integrated into the workspace build.
"./components/webxr",
"./components/shared/webxr",
# Tests have expectations in them, causing tidy to fail.
"./support/crown/tests",
# Upstream