Auto merge of #23731 - Manishearth:webxr-script, r=asajeffrey

Use new WebXR API in script

Todo:
 - [x] Hook new `Frame` information into `XRFrame`
 - [x] Make spaces use new transform info
 - [x] Hook up session view metadata correctly
 - [x] Get mocking working again
 - [x] Get inputs working

Optional todos:
 - [x] Add support for active and animationFrame bool on XRFrame
 - [x] Correctly handle viewer and offset spaces instead of casting
 - [x] Error on zero-length quaternions (see the sketch below)
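The "zero-length quaternions" item lands in `XRRigidTransform`'s constructor further down: instead of testing the input for a zero norm, the patch normalizes first and rejects the result if a component is non-finite. A minimal sketch of why that works, in plain `f32` math rather than euclid's `TypedRotation3D` (the helper below is illustrative, not Servo code):

```rust
// Dividing a zero-length quaternion by its (zero) norm produces NaN or +/-inf
// in every component, so a single is_finite() check catches the bad input.
fn normalize(q: [f32; 4]) -> [f32; 4] {
    let norm = q.iter().map(|x| x * x).sum::<f32>().sqrt();
    [q[0] / norm, q[1] / norm, q[2] / norm, q[3] / norm]
}

fn main() {
    let ok = normalize([0.0, 0.0, 0.0, 1.0]);
    let bad = normalize([0.0, 0.0, 0.0, 0.0]);
    assert!(ok[3].is_finite());
    // Any one non-finite component is enough to reject the transform, which is
    // what the new XRRigidTransform constructor does with Error::InvalidState.
    assert!(!bad[0].is_finite());
    println!("ok = {:?}, bad = {:?}", ok, bad);
}
```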

<s>Not really ready for review yet, but you can go ahead and review what I have so far. It doesn't do anything yet, aside from crashing horribly. I'm opening this PR early so I have a place to track progress.</s>

bors-servo 2019-07-11 18:31:21 -04:00 committed by GitHub
commit 5fdc7c0d2c
33 changed files with 703 additions and 486 deletions

Cargo.lock (generated, 5 changes)
View file

@ -3923,6 +3923,7 @@ dependencies = [
"tendril 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
"tinyfiledialogs 3.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"typetag 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"utf-8 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -5511,7 +5512,7 @@ dependencies = [
[[package]]
name = "webxr"
version = "0.0.1"
source = "git+https://github.com/servo/webxr#39600af83b714e910389c912706d7cd06406c7bb"
source = "git+https://github.com/servo/webxr#3951634a06e2f9417f04e8ada969589f5b8a01d2"
dependencies = [
"euclid 0.19.8 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
@ -5522,7 +5523,7 @@ dependencies = [
[[package]]
name = "webxr-api"
version = "0.0.1"
source = "git+https://github.com/servo/webxr#39600af83b714e910389c912706d7cd06406c7bb"
source = "git+https://github.com/servo/webxr#3951634a06e2f9417f04e8ada969589f5b8a01d2"
dependencies = [
"euclid 0.19.8 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",

View file

@ -9,6 +9,7 @@ use euclid::Size2D;
use fnv::FnvHashMap;
use gleam::gl;
use half::f16;
use ipc_channel::ipc::IpcSender;
use offscreen_gl_context::{DrawBuffer, GLContext, NativeGLContextMethods};
use pixels::{self, PixelFormat};
use std::borrow::Cow;
@ -181,6 +182,9 @@ impl<VR: WebVRRenderHandler + 'static> WebGLThread<VR> {
WebGLMsg::Lock(ctx_id, sender) => {
self.handle_lock(ctx_id, sender);
},
WebGLMsg::LockIPC(ctx_id, sender) => {
self.handle_lock_ipc(ctx_id, sender);
},
WebGLMsg::Unlock(ctx_id) => {
self.handle_unlock(ctx_id);
},
@ -231,13 +235,27 @@ impl<VR: WebVRRenderHandler + 'static> WebGLThread<VR> {
);
}
}
/// Handles a lock external callback received from webrender::ExternalImageHandler
fn handle_lock(
&mut self,
context_id: WebGLContextId,
sender: WebGLSender<(u32, Size2D<i32>, usize)>,
) {
sender.send(self.handle_lock_inner(context_id)).unwrap();
}
/// handle_lock, but unconditionally IPC (used by webxr)
fn handle_lock_ipc(
&mut self,
context_id: WebGLContextId,
sender: IpcSender<(u32, Size2D<i32>, usize)>,
) {
sender.send(self.handle_lock_inner(context_id)).unwrap();
}
/// Shared code between handle_lock and handle_lock_ipc, does the actual syncing/flushing
/// but the caller must send the response back
fn handle_lock_inner(&mut self, context_id: WebGLContextId) -> (u32, Size2D<i32>, usize) {
let data =
Self::make_current_if_needed(context_id, &self.contexts, &mut self.bound_context_id)
.expect("WebGLContext not found in a WebGLMsg::Lock message");
@ -251,9 +269,7 @@ impl<VR: WebVRRenderHandler + 'static> WebGLThread<VR> {
// Without proper flushing, the sync object may never be signaled.
data.ctx.gl().flush();
sender
.send((info.texture_id, info.size, gl_sync as usize))
.unwrap();
(info.texture_id, info.size, gl_sync as usize)
}
/// Handles an unlock external callback received from webrender::ExternalImageHandler

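The hunk above splits the old lock handler in two: `handle_lock_inner` does the actual GL work and returns the `(texture_id, size, sync)` tuple, while `handle_lock` and the new `handle_lock_ipc` differ only in the kind of channel they reply on (the in-process `WebGLSender` versus a real `IpcSender` that the webxr crate can hold across a process boundary). A rough sketch of that shape, with `std::sync::mpsc` standing in for both channel types and a made-up tuple instead of real GL state:

```rust
use std::sync::mpsc::{channel, Sender};

// Stand-in for (texture_id, size, GLsync-as-usize); no real GL here.
type LockInfo = (u32, (i32, i32), usize);

struct WebGLThread;

impl WebGLThread {
    // Shared work: in Servo this makes the context current, inserts a GL fence
    // and flushes; here it just fabricates a plausible tuple.
    fn handle_lock_inner(&mut self, _ctx_id: u32) -> LockInfo {
        (7, (512, 512), 0xf00d)
    }

    // Replies on the "same-process" channel (WebGLSender in the real code).
    fn handle_lock(&mut self, ctx_id: u32, sender: Sender<LockInfo>) {
        sender.send(self.handle_lock_inner(ctx_id)).unwrap();
    }

    // Identical behaviour, but the reply is meant to cross an IPC boundary,
    // which is what a WebXR device running outside the script process needs.
    fn handle_lock_ipc(&mut self, ctx_id: u32, sender: Sender<LockInfo>) {
        sender.send(self.handle_lock_inner(ctx_id)).unwrap();
    }
}

fn main() {
    let mut thread = WebGLThread;
    let (tx, rx) = channel();
    thread.handle_lock(1, tx.clone());
    thread.handle_lock_ipc(1, tx);
    println!("replies: {:?} {:?}", rx.recv().unwrap(), rx.recv().unwrap());
}
```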
View file

@ -7,7 +7,7 @@ use gleam::gl;
use gleam::gl::GLsync;
use gleam::gl::GLuint;
use gleam::gl::Gl;
use ipc_channel::ipc::{IpcBytesReceiver, IpcBytesSender, IpcSender, IpcSharedMemory};
use ipc_channel::ipc::{self, IpcBytesReceiver, IpcBytesSender, IpcSender, IpcSharedMemory};
use pixels::PixelFormat;
use std::borrow::Cow;
use std::fmt;
@ -61,6 +61,8 @@ pub enum WebGLMsg {
/// The WR client should not change the shared texture content until the Unlock call.
/// Currently OpenGL Sync Objects are used to implement the synchronization mechanism.
Lock(WebGLContextId, WebGLSender<(u32, Size2D<i32>, usize)>),
/// Lock(), but unconditionally IPC (used by webxr)
LockIPC(WebGLContextId, IpcSender<(u32, Size2D<i32>, usize)>),
/// Unlocks a specific WebGLContext. Unlock messages are used for a correct synchronization
/// with WebRender external image API.
/// The WR unlocks a context when it finished reading the shared texture contents.
@ -195,9 +197,9 @@ struct SerializableWebGLMsgSender {
#[typetag::serde]
impl webxr_api::WebGLExternalImageApi for SerializableWebGLMsgSender {
fn lock(&self) -> Result<(GLuint, Size2D<i32>, GLsync), webxr_api::Error> {
let (sender, receiver) = webgl_channel().or(Err(webxr_api::Error::CommunicationError))?;
let (sender, receiver) = ipc::channel().or(Err(webxr_api::Error::CommunicationError))?;
self.sender
.send(WebGLMsg::Lock(self.ctx_id, sender))
.send(WebGLMsg::LockIPC(self.ctx_id, sender))
.or(Err(webxr_api::Error::CommunicationError))?;
let (texture, size, sync) = receiver
.recv()

View file

@ -296,6 +296,8 @@ mod gen {
enabled: bool,
#[serde(default)]
test: bool,
#[serde(default)]
glwindow: bool,
},
worklet: {
blockingsleep: {

View file

@ -105,6 +105,7 @@ style_traits = {path = "../style_traits"}
swapper = "0.1"
tendril = {version = "0.4.1", features = ["encoding_rs"]}
time = "0.1.12"
typetag = "0.1"
unicode-segmentation = "1.1.0"
url = "1.6"
utf-8 = "0.7"

View file

@ -57,8 +57,8 @@ use devtools_traits::{CSSError, TimelineMarkerType, WorkerId};
use encoding_rs::{Decoder, Encoding};
use euclid::Length as EuclidLength;
use euclid::{
Point2D, Rect, RigidTransform3D, Rotation3D, Transform2D, Transform3D, TypedScale, TypedSize2D,
Vector2D,
Point2D, Rect, RigidTransform3D, Rotation3D, Transform2D, Transform3D, TypedRigidTransform3D,
TypedScale, TypedSize2D, Vector2D,
};
use html5ever::buffer_queue::BufferQueue;
use html5ever::{LocalName, Namespace, Prefix, QualName};
@ -486,7 +486,12 @@ unsafe_no_jsmanaged_fields!(WebGLVersion);
unsafe_no_jsmanaged_fields!(WebGLSLVersion);
unsafe_no_jsmanaged_fields!(MediaList);
unsafe_no_jsmanaged_fields!(WebVRGamepadData, WebVRGamepadState, WebVRGamepadHand);
unsafe_no_jsmanaged_fields!(webxr_api::Registry);
unsafe_no_jsmanaged_fields!(
webxr_api::Registry,
webxr_api::Session,
webxr_api::Frame,
webxr_api::InputSource
);
unsafe_no_jsmanaged_fields!(ScriptToConstellationChan);
unsafe_no_jsmanaged_fields!(InteractiveMetrics);
unsafe_no_jsmanaged_fields!(InteractiveWindow);
@ -607,6 +612,13 @@ unsafe impl<T, U> JSTraceable for TypedScale<f32, T, U> {
}
}
unsafe impl<T, U> JSTraceable for TypedRigidTransform3D<f32, T, U> {
#[inline]
unsafe fn trace(&self, _trc: *mut JSTracer) {
// Do nothing
}
}
unsafe impl<T> JSTraceable for EuclidLength<u64, T> {
#[inline]
unsafe fn trace(&self, _trc: *mut JSTracer) {

View file

@ -7,43 +7,39 @@ use crate::dom::bindings::codegen::Bindings::FakeXRDeviceBinding::{
};
use crate::dom::bindings::codegen::Bindings::XRViewBinding::XREye;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use webvr_traits::{MockVRControlMsg, MockVRView, WebVRMsg};
use euclid::{TypedRigidTransform3D, TypedRotation3D, TypedTransform3D, TypedVector3D};
use ipc_channel::ipc::IpcSender;
use webxr_api::{MockDeviceMsg, View, Views};
#[dom_struct]
pub struct FakeXRDevice {
reflector: Reflector,
#[ignore_malloc_size_of = "defined in ipc-channel"]
sender: IpcSender<MockDeviceMsg>,
}
impl FakeXRDevice {
pub fn new_inherited() -> FakeXRDevice {
pub fn new_inherited(sender: IpcSender<MockDeviceMsg>) -> FakeXRDevice {
FakeXRDevice {
reflector: Reflector::new(),
sender,
}
}
pub fn new(global: &GlobalScope) -> DomRoot<FakeXRDevice> {
pub fn new(global: &GlobalScope, sender: IpcSender<MockDeviceMsg>) -> DomRoot<FakeXRDevice> {
reflect_dom_object(
Box::new(FakeXRDevice::new_inherited()),
Box::new(FakeXRDevice::new_inherited(sender)),
global,
FakeXRDeviceBinding::Wrap,
)
}
fn send_msg(&self, msg: MockVRControlMsg) {
self.global()
.as_window()
.webvr_thread()
.unwrap()
.send(WebVRMsg::MessageMockDisplay(msg))
.unwrap();
}
}
pub fn get_views(views: &[FakeXRViewInit]) -> Fallible<(MockVRView, MockVRView)> {
pub fn get_views(views: &[FakeXRViewInit]) -> Fallible<Views> {
if views.len() != 2 {
return Err(Error::NotSupported);
}
@ -66,45 +62,53 @@ pub fn get_views(views: &[FakeXRViewInit]) -> Fallible<(MockVRView, MockVRView)>
let mut proj_r = [0.; 16];
let v: Vec<_> = left.projectionMatrix.iter().map(|x| **x).collect();
proj_l.copy_from_slice(&v);
let proj_l = TypedTransform3D::from_array(proj_l);
let v: Vec<_> = right.projectionMatrix.iter().map(|x| **x).collect();
proj_r.copy_from_slice(&v);
let proj_r = TypedTransform3D::from_array(proj_r);
let mut offset_l = [0.; 3];
let mut offset_r = [0.; 3];
let v: Vec<_> = left.viewOffset.position.iter().map(|x| **x).collect();
offset_l.copy_from_slice(&v);
let v: Vec<_> = right.viewOffset.position.iter().map(|x| **x).collect();
offset_r.copy_from_slice(&v);
let left = MockVRView {
// spec defines offsets as origins, but mock API expects the inverse transform
let offset_l = get_origin(&left.viewOffset)?.inverse();
let offset_r = get_origin(&right.viewOffset)?.inverse();
let left = View {
projection: proj_l,
offset: offset_l,
transform: offset_l,
};
let right = MockVRView {
let right = View {
projection: proj_r,
offset: offset_r,
transform: offset_r,
};
Ok((left, right))
Ok(Views::Stereo(left, right))
}
pub fn get_origin(origin: &FakeXRRigidTransformInit) -> Fallible<([f32; 3], [f32; 4])> {
pub fn get_origin<T, U>(
origin: &FakeXRRigidTransformInit,
) -> Fallible<TypedRigidTransform3D<f32, T, U>> {
if origin.position.len() != 3 || origin.orientation.len() != 4 {
return Err(Error::Type("Incorrectly sized array".into()));
}
let mut p = [0.; 3];
let mut o = [0.; 4];
let v: Vec<_> = origin.position.iter().map(|x| **x).collect();
p.copy_from_slice(&v[0..3]);
let v: Vec<_> = origin.orientation.iter().map(|x| **x).collect();
o.copy_from_slice(&v);
let p = TypedVector3D::new(
*origin.position[0],
*origin.position[1],
*origin.position[2],
);
let o = TypedRotation3D::unit_quaternion(
*origin.orientation[0],
*origin.orientation[1],
*origin.orientation[2],
*origin.orientation[3],
);
Ok((p, o))
Ok(TypedRigidTransform3D::new(o, p))
}
impl FakeXRDeviceMethods for FakeXRDevice {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SetViews(&self, views: Vec<FakeXRViewInit>) -> Fallible<()> {
let (left, right) = get_views(&views)?;
self.send_msg(MockVRControlMsg::SetViews(left, right));
let _ = self
.sender
.send(MockDeviceMsg::SetViews(get_views(&views)?));
Ok(())
}
@ -114,8 +118,9 @@ impl FakeXRDeviceMethods for FakeXRDevice {
origin: &FakeXRRigidTransformInit,
_emulated_position: bool,
) -> Fallible<()> {
let (position, orientation) = get_origin(origin)?;
self.send_msg(MockVRControlMsg::SetViewerPose(position, orientation));
let _ = self
.sender
.send(MockDeviceMsg::SetViewerOrigin(get_origin(origin)?));
Ok(())
}
}

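The comment in the hunk above ("spec defines offsets as origins, but mock API expects the inverse transform") is easy to trip over. A small sketch of the distinction under the simplest possible assumption, a translation-only transform, where inverting is just negation (toy type, not euclid):

```rust
// A translation-only rigid transform, standing in for TypedRigidTransform3D.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Translation {
    x: f32,
    y: f32,
    z: f32,
}

impl Translation {
    fn inverse(self) -> Translation {
        Translation { x: -self.x, y: -self.y, z: -self.z }
    }
    fn apply(self, p: (f32, f32, f32)) -> (f32, f32, f32) {
        (p.0 + self.x, p.1 + self.y, p.2 + self.z)
    }
}

fn main() {
    // The test API supplies each view's origin in viewer space, e.g. a right
    // eye sitting about 3.2 cm to the right of the viewer origin.
    let right_eye_origin = Translation { x: 0.032, y: 0.0, z: 0.0 };

    // get_views() sends the inverse: the transform that maps the eye's origin
    // back onto the viewer origin, which is the direction the mock expects.
    let sent_to_device = right_eye_origin.inverse();
    assert_eq!(sent_to_device.apply((0.032, 0.0, 0.0)), (0.0, 0.0, 0.0));
    println!("offset handed to the mock device: {:?}", sent_to_device);
}
```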
View file

@ -15,14 +15,12 @@ use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGL
use crate::dom::bindings::codegen::Bindings::WindowBinding::FrameRequestCallback;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::codegen::Bindings::XRRenderStateBinding::XRRenderStateInit;
use crate::dom::bindings::codegen::Bindings::XRSessionBinding::XRFrameRequestCallback;
use crate::dom::bindings::codegen::Bindings::XRWebGLLayerBinding::XRWebGLLayerMethods;
use crate::dom::bindings::error::Error;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot, MutDom, MutNullableDom};
use crate::dom::bindings::root::{DomRoot, MutDom, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
@ -35,10 +33,6 @@ use crate::dom::vrframedata::VRFrameData;
use crate::dom::vrpose::VRPose;
use crate::dom::vrstageparameters::VRStageParameters;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use crate::dom::xrframe::XRFrame;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrsession::XRSession;
use crate::dom::xrwebgllayer::XRWebGLLayer;
use crate::script_runtime::CommonScriptMsg;
use crate::script_runtime::ScriptThreadEventCategory::WebVREvent;
use crate::task_source::{TaskSource, TaskSourceName};
@ -48,7 +42,6 @@ use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use profile_traits::ipc;
use std::cell::Cell;
use std::collections::HashMap;
use std::mem;
use std::ops::Deref;
use std::rc::Rc;
@ -79,8 +72,6 @@ pub struct VRDisplay {
/// List of request animation frame callbacks
#[ignore_malloc_size_of = "closures are hard"]
raf_callback_list: DomRefCell<Vec<(u32, Option<Rc<FrameRequestCallback>>)>>,
#[ignore_malloc_size_of = "closures are hard"]
xr_raf_callback_list: DomRefCell<Vec<(u32, Option<Rc<XRFrameRequestCallback>>)>>,
/// When there isn't any layer_ctx the RAF thread needs to be "woken up"
raf_wakeup_sender: DomRefCell<Option<Sender<()>>>,
#[ignore_malloc_size_of = "Rc is hard"]
@ -92,12 +83,6 @@ pub struct VRDisplay {
running_display_raf: Cell<bool>,
paused: Cell<bool>,
stopped_on_pause: Cell<bool>,
/// Whether or not this is XR mode, and the session
xr_session: MutNullableDom<XRSession>,
/// Have inputs been initialized? (i.e, has getInputSources() been called?)
/// XR only
initialized_inputs: Cell<bool>,
input_sources: DomRefCell<HashMap<u32, Dom<XRInputSource>>>,
}
unsafe_no_jsmanaged_fields!(WebVRDisplayData);
@ -121,8 +106,6 @@ struct VRRAFUpdate {
/// Number uniquely identifying the WebGL context
/// so that we may setup/tear down VR compositors as things change
context_id: usize,
/// Do we need input data?
needs_inputs: bool,
}
type VRRAFUpdateSender = Sender<Result<VRRAFUpdate, ()>>;
@ -159,7 +142,6 @@ impl VRDisplay {
layer_ctx: MutNullableDom::default(),
next_raf_id: Cell::new(1),
raf_callback_list: DomRefCell::new(vec![]),
xr_raf_callback_list: DomRefCell::new(vec![]),
raf_wakeup_sender: DomRefCell::new(None),
pending_renderstate_updates: DomRefCell::new(vec![]),
frame_data_status: Cell::new(VRFrameDataStatus::Waiting),
@ -171,9 +153,6 @@ impl VRDisplay {
// This flag is set when the Display was presenting when it received a VR Pause event.
// When the VR Resume event is received and the flag is set, VR presentation automatically restarts.
stopped_on_pause: Cell::new(false),
xr_session: MutNullableDom::default(),
initialized_inputs: Cell::new(false),
input_sources: DomRefCell::new(HashMap::new()),
}
}
@ -184,14 +163,6 @@ impl VRDisplay {
VRDisplayBinding::Wrap,
)
}
pub fn left_eye_params_offset(&self) -> [f32; 3] {
self.left_eye_params.get().offset_array()
}
pub fn right_eye_params_offset(&self) -> [f32; 3] {
self.right_eye_params.get().offset_array()
}
}
impl Drop for VRDisplay {
@ -629,51 +600,11 @@ impl VRDisplay {
depth_far: self.depth_far.get(),
api_sender: self.api_sender(),
context_id: self.context_id(),
needs_inputs: self.initialized_inputs.get(),
}
}
pub fn queue_renderstate(&self, state: &XRRenderStateInit, promise: Rc<Promise>) {
// can't clone dictionaries
let new_state = XRRenderStateInit {
depthNear: state.depthNear,
depthFar: state.depthFar,
baseLayer: state.baseLayer.clone(),
};
self.pending_renderstate_updates
.borrow_mut()
.push((new_state, promise));
if let Some(ref wakeup) = *self.raf_wakeup_sender.borrow() {
let _ = wakeup.send(());
}
}
fn process_renderstate_queue(&self) {
let mut updates = self.pending_renderstate_updates.borrow_mut();
debug_assert!(updates.is_empty() || self.xr_session.get().is_some());
for update in updates.drain(..) {
if let Some(near) = update.0.depthNear {
self.depth_near.set(*near);
}
if let Some(far) = update.0.depthFar {
self.depth_far.set(*far);
}
if let Some(ref layer) = update.0.baseLayer {
self.xr_session.get().unwrap().set_layer(&layer);
let layer = layer.downcast::<XRWebGLLayer>().unwrap();
self.layer_ctx.set(Some(&layer.Context()));
}
update.1.resolve_native(&());
}
}
fn init_present(&self) {
self.presenting.set(true);
let xr = self.global().as_window().Navigator().Xr();
xr.set_active_immersive_session(&self);
self.process_renderstate_queue();
if self.has_raf_thread.get() {
return;
}
@ -693,7 +624,6 @@ impl VRDisplay {
let (raf_sender, raf_receiver) = unbounded();
let (wakeup_sender, wakeup_receiver) = unbounded();
*self.raf_wakeup_sender.borrow_mut() = Some(wakeup_sender);
let mut needs_inputs = false;
// The render loop at native headset frame rate is implemented using a dedicated thread.
// Every loop iteration syncs pose data with the HMD, submits the pixels to the display and waits for Vsync.
@ -734,7 +664,7 @@ impl VRDisplay {
display_id,
near,
far,
needs_inputs,
false,
sync_sender.clone(),
);
api_sender.send_vr(msg).unwrap();
@ -744,7 +674,6 @@ impl VRDisplay {
let this = address.clone();
let task = Box::new(task!(flush_renderstate_queue: move || {
let this = this.root();
this.process_renderstate_queue();
sender.send(Ok(this.vr_raf_update())).unwrap();
}));
js_sender
@ -761,7 +690,6 @@ impl VRDisplay {
if let Ok(update) = raf_receiver.recv().unwrap() {
near = update.depth_near;
far = update.depth_far;
needs_inputs = update.needs_inputs;
if update.context_id != context_id {
if let Some(ref api_sender) = update.api_sender {
api_sender
@ -790,8 +718,6 @@ impl VRDisplay {
fn stop_present(&self) {
self.presenting.set(false);
let xr = self.global().as_window().Navigator().Xr();
xr.deactivate_session();
*self.frame_data_receiver.borrow_mut() = None;
self.has_raf_thread.set(false);
if let Some(api_sender) = self.api_sender() {
@ -820,14 +746,6 @@ impl VRDisplay {
match receiver.recv().unwrap() {
Ok(pose) => {
*self.frame_data.borrow_mut() = pose.frame.block();
if self.initialized_inputs.get() {
let inputs = self.input_sources.borrow();
for (id, state) in pose.gamepads {
if let Some(input) = inputs.get(&id) {
input.update_state(state);
}
}
}
VRFrameDataStatus::Synced
},
Err(()) => VRFrameDataStatus::Exit,
@ -844,22 +762,6 @@ impl VRDisplay {
let now = self.global().as_window().Performance().Now();
if let Some(session) = self.xr_session.get() {
let mut callbacks = mem::replace(&mut *self.xr_raf_callback_list.borrow_mut(), vec![]);
if callbacks.is_empty() {
return;
}
self.sync_frame_data();
let frame = XRFrame::new(&self.global(), &session, self.frame_data.borrow().clone());
for (_, callback) in callbacks.drain(..) {
if let Some(callback) = callback {
let _ = callback.Call__(Finite::wrap(*now), &frame, ExceptionHandling::Report);
}
}
// frame submission is automatic in XR
self.SubmitFrame();
} else {
self.running_display_raf.set(true);
let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]);
// Call registered VRDisplay.requestAnimationFrame callbacks.
@ -877,9 +779,7 @@ impl VRDisplay {
warn!("WebVR: You should call GetFrameData while presenting");
self.sync_frame_data();
}
}
self.process_renderstate_queue();
match self.frame_data_status.get() {
VRFrameDataStatus::Synced => {
// Sync succeeded. Notify RAF thread.
@ -894,88 +794,6 @@ impl VRDisplay {
}
}
// XR stuff
// XXXManishearth eventually we should share as much logic as possible
impl VRDisplay {
pub fn xr_present(
&self,
session: &XRSession,
ctx: Option<&WebGLRenderingContext>,
promise: Option<Rc<Promise>>,
) {
let layer_bounds = WebVRLayer::default();
self.xr_session.set(Some(session));
let session = Trusted::new(session);
self.request_present(layer_bounds, ctx, promise, move |p| {
let session = session.root();
p.resolve_native(&session);
});
}
pub fn xr_raf(&self, callback: Rc<XRFrameRequestCallback>) -> u32 {
let raf_id = self.next_raf_id.get();
self.next_raf_id.set(raf_id + 1);
self.xr_raf_callback_list
.borrow_mut()
.push((raf_id, Some(callback)));
raf_id
}
pub fn xr_cancel_raf(&self, handle: i32) {
let mut list = self.xr_raf_callback_list.borrow_mut();
if let Some(pair) = list.iter_mut().find(|pair| pair.0 == handle as u32) {
pair.1 = None;
}
}
/// Initialize XRInputSources
fn initialize_inputs(&self) {
if self.initialized_inputs.get() {
return;
}
self.initialized_inputs.set(true);
let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap();
let display = self.display.borrow().display_id;
self.webvr_thread()
.send(WebVRMsg::GetGamepadsForDisplay(display, sender))
.unwrap();
match receiver.recv().unwrap() {
Ok(gamepads) => {
let global = self.global();
let session = self
.xr_session
.get()
.expect("initialize_inputs called on a VR session");
let roots: Vec<_> = gamepads
.into_iter()
.map(|g| {
(
g.1.gamepad_id,
XRInputSource::new(&global, &session, g.0, g.1),
)
})
.collect();
let mut inputs = self.input_sources.borrow_mut();
for (id, root) in &roots {
inputs.insert(*id, Dom::from_ref(&root));
}
},
Err(_) => {},
}
}
pub fn get_input_sources(&self) -> Vec<DomRoot<XRInputSource>> {
self.initialize_inputs();
self.input_sources
.borrow()
.iter()
.map(|(_, x)| DomRoot::from_ref(&**x))
.collect()
}
}
// WebVR Spec: If the number of values in the leftBounds/rightBounds arrays
// is not 0 or 4 for any of the passed layers the promise is rejected
fn parse_bounds(src: &Option<Vec<Finite<f32>>>, dst: &mut [f32; 4]) -> Result<(), &'static str> {

View file

@ -62,10 +62,6 @@ impl VREyeParameters {
eye_parameters
}
pub fn offset_array(&self) -> [f32; 3] {
self.parameters.borrow().offset
}
}
impl VREyeParametersMethods for VREyeParameters {

View file

@ -36,13 +36,11 @@ dictionary FakeXRViewInit {
// https://immersive-web.github.io/webxr/#view-offset
required FakeXRRigidTransformInit viewOffset;
// https://immersive-web.github.io/webxr/#dom-xrwebgllayer-getviewport
required FakeXRViewportInit viewport;
required FakeXRDeviceResolution resolution;
};
// https://immersive-web.github.io/webxr/#xrviewport
dictionary FakeXRViewportInit {
required long x;
required long y;
dictionary FakeXRDeviceResolution {
required long width;
required long height;
};

View file

@ -29,7 +29,7 @@ interface XRSession : EventTarget {
// FrozenArray<XRInputSource> getInputSources();
sequence<XRInputSource> getInputSources();
Promise<void> updateRenderState(optional XRRenderStateInit state);
void updateRenderState(optional XRRenderStateInit state);
long requestAnimationFrame(XRFrameRequestCallback callback);
void cancelAnimationFrame(long handle);

View file

@ -29,7 +29,7 @@ dictionary FakeXRDeviceInit {
// The bounds coordinates. If null, bounded reference spaces are not supported.
sequence<FakeXRBoundsPoint> boundsCoodinates;
// Eye level used for calculating floor-level spaces
float eyeLevel = 1.5;
FakeXRRigidTransformInit floorOrigin;
FakeXRRigidTransformInit viewerOrigin;
};

View file

@ -6,7 +6,8 @@
enum XREye {
"left",
"right"
"right",
"unknown",
};
[SecureContext, Exposed=Window, Pref="dom.webxr.enabled"]

View file

@ -266,6 +266,9 @@ pub struct Window {
#[ignore_malloc_size_of = "channels are hard"]
webvr_chan: Option<IpcSender<WebVRMsg>>,
#[ignore_malloc_size_of = "defined in webxr"]
webxr_registry: webxr_api::Registry,
/// A map for storing the previous permission state read results.
permission_state_invocation_results: DomRefCell<HashMap<String, PermissionState>>,
@ -436,6 +439,10 @@ impl Window {
self.webvr_chan.clone()
}
pub fn webxr_registry(&self) -> webxr_api::Registry {
self.webxr_registry.clone()
}
fn new_paint_worklet(&self) -> DomRoot<Worklet> {
debug!("Creating new paint worklet.");
Worklet::new(self, WorkletGlobalScopeType::Paint)
@ -2072,6 +2079,7 @@ impl Window {
navigation_start_precise: u64,
webgl_chan: Option<WebGLChan>,
webvr_chan: Option<IpcSender<WebVRMsg>>,
webxr_registry: webxr_api::Registry,
microtask_queue: Rc<MicrotaskQueue>,
webrender_document: DocumentId,
webrender_api_sender: RenderApiSender,
@ -2149,6 +2157,7 @@ impl Window {
test_runner: Default::default(),
webgl_chan,
webvr_chan,
webxr_registry,
permission_state_invocation_results: Default::default(),
pending_layout_images: Default::default(),
unminified_js_dir: Default::default(),

View file

@ -10,6 +10,7 @@ use crate::dom::bindings::codegen::Bindings::XRBinding::XRSessionCreationOptions
use crate::dom::bindings::codegen::Bindings::XRBinding::{XRMethods, XRSessionMode};
use crate::dom::bindings::error::Error;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::event::Event;
@ -22,13 +23,16 @@ use crate::dom::vrdisplay::VRDisplay;
use crate::dom::vrdisplayevent::VRDisplayEvent;
use crate::dom::xrsession::XRSession;
use crate::dom::xrtest::XRTest;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::cell::Cell;
use std::rc::Rc;
use webvr_traits::{WebVRDisplayData, WebVRDisplayEvent, WebVREvent, WebVRMsg};
use webvr_traits::{WebVRGamepadData, WebVRGamepadEvent, WebVRGamepadState};
use webxr_api::{Error as XRError, Session, SessionMode};
#[dom_struct]
pub struct XR {
@ -36,7 +40,7 @@ pub struct XR {
displays: DomRefCell<Vec<Dom<VRDisplay>>>,
gamepads: DomRefCell<Vec<Dom<Gamepad>>>,
pending_immersive_session: Cell<bool>,
active_immersive_session: MutNullableDom<VRDisplay>,
active_immersive_session: MutNullableDom<XRSession>,
test: MutNullableDom<XRTest>,
}
@ -66,17 +70,12 @@ impl XR {
self.pending_immersive_session.set(true)
}
pub fn set_active_immersive_session(&self, session: &VRDisplay) {
pub fn set_active_immersive_session(&self, session: &XRSession) {
// XXXManishearth when we support non-immersive (inline) sessions we should
// ensure they never reach these codepaths
self.pending_immersive_session.set(false);
self.active_immersive_session.set(Some(session))
}
pub fn deactivate_session(&self) {
self.pending_immersive_session.set(false);
self.active_immersive_session.set(None)
}
}
impl Drop for XR {
@ -85,17 +84,67 @@ impl Drop for XR {
}
}
impl Into<SessionMode> for XRSessionMode {
fn into(self) -> SessionMode {
match self {
XRSessionMode::Immersive_vr => SessionMode::ImmersiveVR,
XRSessionMode::Immersive_ar => SessionMode::ImmersiveAR,
XRSessionMode::Inline => SessionMode::Inline,
}
}
}
impl XRMethods for XR {
/// https://immersive-web.github.io/webxr/#dom-xr-supportssessionmode
fn SupportsSessionMode(&self, mode: XRSessionMode, comp: InCompartment) -> Rc<Promise> {
#[derive(serde::Serialize, serde::Deserialize)]
pub struct SupportsSession {
sender: IpcSender<bool>,
}
#[typetag::serde]
impl webxr_api::SessionSupportCallback for SupportsSession {
fn callback(&mut self, result: Result<(), XRError>) {
let _ = self.sender.send(result.is_ok());
}
}
// XXXManishearth this should select an XR device first
let promise = Promise::new_in_current_compartment(&self.global(), comp);
if mode == XRSessionMode::Immersive_vr {
promise.resolve_native(&());
let mut trusted = Some(TrustedPromise::new(promise.clone()));
let global = self.global();
let window = global.as_window();
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
// router doesn't know this is only called once
let trusted = if let Some(trusted) = trusted.take() {
trusted
} else {
// XXXManishearth support other modes
promise.reject_error(Error::NotSupported);
}
error!("supportsSession callback called twice!");
return;
};
let message = if let Ok(message) = message.to() {
message
} else {
error!("supportsSession callback given incorrect payload");
return;
};
if message {
let _ = task_source.queue_with_canceller(trusted.resolve_task(()), &canceller);
} else {
let _ = task_source
.queue_with_canceller(trusted.reject_task(Error::NotSupported), &canceller);
};
}),
);
window
.webxr_registry()
.supports_session(mode.into(), SupportsSession { sender });
promise
}
@ -106,6 +155,17 @@ impl XRMethods for XR {
options: &XRSessionCreationOptions,
comp: InCompartment,
) -> Rc<Promise> {
#[derive(serde::Serialize, serde::Deserialize)]
pub struct RequestSession {
sender: IpcSender<Result<Session, XRError>>,
}
#[typetag::serde]
impl webxr_api::SessionRequestCallback for RequestSession {
fn callback(&mut self, result: Result<Session, XRError>) {
let _ = self.sender.send(result);
}
}
let promise = Promise::new_in_current_compartment(&self.global(), comp);
if options.mode != XRSessionMode::Immersive_vr {
promise.reject_error(Error::NotSupported);
@ -116,28 +176,42 @@ impl XRMethods for XR {
promise.reject_error(Error::InvalidState);
return promise;
}
// we set pending immersive session to true further down
// to handle rejections in a cleaner way
let displays = self.get_displays();
let displays = match displays {
Ok(d) => d,
Err(_) => {
promise.reject_native(&());
return promise;
},
};
// XXXManishearth filter for displays which can_present
if displays.is_empty() {
promise.reject_error(Error::Security);
}
self.set_pending();
let session = XRSession::new(&self.global(), &displays[0]);
session.xr_present(promise.clone());
let promise = Promise::new_in_current_compartment(&self.global(), comp);
let mut trusted = Some(TrustedPromise::new(promise.clone()));
let this = Trusted::new(self);
let global = self.global();
let window = global.as_window();
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
// router doesn't know this is only called once
let trusted = trusted.take().unwrap();
let this = this.clone();
let message = if let Ok(message) = message.to() {
message
} else {
error!("requestSession callback given incorrect payload");
return;
};
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().session_obtained(message, trusted.root());
}),
&canceller,
);
}),
);
window
.webxr_registry()
.request_session(options.mode.into(), RequestSession { sender });
promise
}
@ -148,6 +222,20 @@ impl XRMethods for XR {
}
impl XR {
fn session_obtained(&self, response: Result<Session, XRError>, promise: Rc<Promise>) {
let session = match response {
Ok(session) => session,
Err(_) => {
promise.reject_native(&());
return;
},
};
let session = XRSession::new(&self.global(), session);
self.set_active_immersive_session(&session);
promise.resolve_native(&session);
}
pub fn get_displays(&self) -> Result<Vec<DomRoot<VRDisplay>>, ()> {
if let Some(webvr_thread) = self.webvr_thread() {
let (sender, receiver) =

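Both `SupportsSessionMode` and `RequestSession` above follow the same shape: a small serializable callback struct (made cross-process by `#[typetag::serde]`) carries an `IpcSender`, the webxr registry invokes it off the script thread, and the script side routes the receiving end onto the DOM-manipulation task source, which resolves or rejects the promise. A stripped-down sketch of that flow, with `std::sync::mpsc` and a plain thread standing in for ipc-channel, the ROUTER and the task source:

```rust
use std::sync::mpsc;
use std::thread;

// Stand-in for webxr_api::SessionSupportCallback (really #[typetag::serde]'d
// so it can cross a process boundary; plain Send is enough for the sketch).
trait SessionSupportCallback: Send + 'static {
    fn callback(&mut self, supported: bool);
}

struct SupportsSession {
    sender: mpsc::Sender<bool>,
}

impl SessionSupportCallback for SupportsSession {
    fn callback(&mut self, supported: bool) {
        // Never touch DOM objects here: just forward the result over the channel.
        let _ = self.sender.send(supported);
    }
}

// Stand-in for webxr_registry.supports_session(): runs off the script thread.
fn supports_session(mut cb: Box<dyn SessionSupportCallback>) {
    thread::spawn(move || cb.callback(true)).join().unwrap();
}

fn main() {
    let (sender, receiver) = mpsc::channel();
    supports_session(Box::new(SupportsSession { sender }));

    // In Servo the ROUTER forwards this onto the dom-manipulation task source,
    // which resolves/rejects the TrustedPromise; printing stands in for that.
    if receiver.recv().unwrap() {
        println!("queue task: resolve promise");
    } else {
        println!("queue task: reject promise with NotSupported");
    }
}
```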
View file

@ -15,36 +15,47 @@ use crate::dom::xrsession::XRSession;
use crate::dom::xrspace::XRSpace;
use crate::dom::xrviewerpose::XRViewerPose;
use dom_struct::dom_struct;
use webvr_traits::WebVRFrameData;
use std::cell::Cell;
use webxr_api::Frame;
#[dom_struct]
pub struct XRFrame {
reflector_: Reflector,
session: Dom<XRSession>,
#[ignore_malloc_size_of = "defined in rust-webvr"]
data: WebVRFrameData,
data: Frame,
active: Cell<bool>,
animation_frame: Cell<bool>,
}
impl XRFrame {
fn new_inherited(session: &XRSession, data: WebVRFrameData) -> XRFrame {
fn new_inherited(session: &XRSession, data: Frame) -> XRFrame {
XRFrame {
reflector_: Reflector::new(),
session: Dom::from_ref(session),
data,
active: Cell::new(false),
animation_frame: Cell::new(false),
}
}
pub fn new(
global: &GlobalScope,
session: &XRSession,
data: WebVRFrameData,
) -> DomRoot<XRFrame> {
pub fn new(global: &GlobalScope, session: &XRSession, data: Frame) -> DomRoot<XRFrame> {
reflect_dom_object(
Box::new(XRFrame::new_inherited(session, data)),
global,
XRFrameBinding::Wrap,
)
}
/// https://immersive-web.github.io/webxr/#xrframe-active
pub fn set_active(&self, active: bool) {
self.active.set(active);
}
/// https://immersive-web.github.io/webxr/#xrframe-animationframe
pub fn set_animation_frame(&self, animation_frame: bool) {
self.animation_frame.set(animation_frame);
}
}
impl XRFrameMethods for XRFrame {
@ -61,13 +72,13 @@ impl XRFrameMethods for XRFrame {
if self.session != reference.upcast::<XRSpace>().session() {
return Err(Error::InvalidState);
}
if !self.active.get() || !self.animation_frame.get() {
return Err(Error::InvalidState);
}
let pose = reference.get_viewer_pose(&self.data);
Ok(Some(XRViewerPose::new(
&self.global(),
&self.session,
pose,
&self.data,
)))
Ok(Some(XRViewerPose::new(&self.global(), &self.session, pose)))
}
/// https://immersive-web.github.io/webxr/#dom-xrframe-getpose
@ -79,6 +90,9 @@ impl XRFrameMethods for XRFrame {
if self.session != space.session() || self.session != relative_to.session() {
return Err(Error::InvalidState);
}
if !self.active.get() {
return Err(Error::InvalidState);
}
let space = space.get_pose(&self.data);
let relative_to = relative_to.get_pose(&self.data);
let pose = relative_to.inverse().pre_mul(&space);

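The new `active` and `animation_frame` cells implement the spec's rule that pose queries only succeed while the frame is live inside its animation-frame callbacks. A toy version of the bookkeeping (the error type and pose value are placeholders):

```rust
use std::cell::Cell;

#[derive(Debug)]
struct InvalidStateError;

struct Frame {
    active: Cell<bool>,
    animation_frame: Cell<bool>,
}

impl Frame {
    // Mirrors XRFrame::GetViewerPose's guard: both flags must be set.
    fn get_viewer_pose(&self) -> Result<&'static str, InvalidStateError> {
        if !self.active.get() || !self.animation_frame.get() {
            return Err(InvalidStateError);
        }
        Ok("viewer pose")
    }
}

fn main() {
    let frame = Frame {
        active: Cell::new(false),
        animation_frame: Cell::new(false),
    };
    // Outside the callback the query is rejected...
    assert!(frame.get_viewer_pose().is_err());

    // ...the session flips the flags on just before running rAF callbacks...
    frame.active.set(true);
    frame.animation_frame.set(true);
    assert!(frame.get_viewer_pose().is_ok());

    // ...and should unset `active` afterwards (still an XXX in the patch).
    frame.active.set(false);
    assert!(frame.get_viewer_pose().is_err());
}
```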
View file

@ -2,7 +2,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::XRInputSourceBinding;
use crate::dom::bindings::codegen::Bindings::XRInputSourceBinding::{
XRHandedness, XRInputSourceMethods,
@ -13,30 +12,24 @@ use crate::dom::globalscope::GlobalScope;
use crate::dom::xrsession::XRSession;
use crate::dom::xrspace::XRSpace;
use dom_struct::dom_struct;
use webvr_traits::{WebVRGamepadData, WebVRGamepadHand, WebVRGamepadState, WebVRPose};
use webxr_api::{Handedness, InputId, InputSource};
#[dom_struct]
pub struct XRInputSource {
reflector: Reflector,
session: Dom<XRSession>,
#[ignore_malloc_size_of = "Defined in rust-webvr"]
data: WebVRGamepadData,
info: InputSource,
#[ignore_malloc_size_of = "Defined in rust-webvr"]
state: DomRefCell<WebVRGamepadState>,
target_ray_space: MutNullableDom<XRSpace>,
}
impl XRInputSource {
pub fn new_inherited(
session: &XRSession,
data: WebVRGamepadData,
state: WebVRGamepadState,
) -> XRInputSource {
pub fn new_inherited(session: &XRSession, info: InputSource) -> XRInputSource {
XRInputSource {
reflector: Reflector::new(),
session: Dom::from_ref(session),
data,
state: DomRefCell::new(state),
info,
target_ray_space: Default::default(),
}
}
@ -44,32 +37,27 @@ impl XRInputSource {
pub fn new(
global: &GlobalScope,
session: &XRSession,
data: WebVRGamepadData,
state: WebVRGamepadState,
info: InputSource,
) -> DomRoot<XRInputSource> {
reflect_dom_object(
Box::new(XRInputSource::new_inherited(session, data, state)),
Box::new(XRInputSource::new_inherited(session, info)),
global,
XRInputSourceBinding::Wrap,
)
}
pub fn update_state(&self, state: WebVRGamepadState) {
*self.state.borrow_mut() = state;
}
pub fn pose(&self) -> WebVRPose {
self.state.borrow().pose
pub fn id(&self) -> InputId {
self.info.id
}
}
impl XRInputSourceMethods for XRInputSource {
/// https://immersive-web.github.io/webxr/#dom-xrinputsource-handedness
fn Handedness(&self) -> XRHandedness {
match self.data.hand {
WebVRGamepadHand::Unknown => XRHandedness::None,
WebVRGamepadHand::Left => XRHandedness::Left,
WebVRGamepadHand::Right => XRHandedness::Right,
match self.info.handedness {
Handedness::None => XRHandedness::None,
Handedness::Left => XRHandedness::Left,
Handedness::Right => XRHandedness::Right,
}
}

View file

@ -8,8 +8,8 @@ use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrrigidtransform::XRRigidTransform;
use crate::dom::xrsession::ApiRigidTransform;
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
#[dom_struct]
pub struct XRPose {
@ -26,7 +26,7 @@ impl XRPose {
}
#[allow(unused)]
pub fn new(global: &GlobalScope, transform: RigidTransform3D<f64>) -> DomRoot<XRPose> {
pub fn new(global: &GlobalScope, transform: ApiRigidTransform) -> DomRoot<XRPose> {
let transform = XRRigidTransform::new(global, transform);
reflect_dom_object(
Box::new(XRPose::new_inherited(&transform)),

View file

@ -10,11 +10,11 @@ use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrrigidtransform::XRRigidTransform;
use crate::dom::xrsession::XRSession;
use crate::dom::xrsession::{cast_transform, ApiPose, ApiRigidTransform, ApiViewerPose, XRSession};
use crate::dom::xrspace::XRSpace;
use dom_struct::dom_struct;
use euclid::{RigidTransform3D, Vector3D};
use webvr_traits::WebVRFrameData;
use euclid::{TypedRigidTransform3D, TypedVector3D};
use webxr_api::Frame;
#[dom_struct]
pub struct XRReferenceSpace {
@ -80,7 +80,7 @@ impl XRReferenceSpace {
///
/// This is equivalent to `get_pose(self).inverse() * get_pose(viewerSpace)` (in column vector notation),
/// however we specialize it to be efficient
pub fn get_viewer_pose(&self, base_pose: &WebVRFrameData) -> RigidTransform3D<f64> {
pub fn get_viewer_pose(&self, base_pose: &Frame) -> ApiViewerPose {
let pose = self.get_unoffset_viewer_pose(base_pose);
// This may change, see https://github.com/immersive-web/webxr/issues/567
@ -98,8 +98,8 @@ impl XRReferenceSpace {
/// Gets pose of the viewer with respect to this space
///
/// Does not apply originOffset, use get_viewer_pose instead if you need it
pub fn get_unoffset_viewer_pose(&self, base_pose: &WebVRFrameData) -> RigidTransform3D<f64> {
let viewer_pose = XRSpace::pose_to_transform(&base_pose.pose);
pub fn get_unoffset_viewer_pose(&self, base_pose: &Frame) -> ApiViewerPose {
let viewer_pose: ApiViewerPose = cast_transform(base_pose.transform);
// all math is in column-vector notation
// we use the following equation to verify correctness here:
// get_viewer_pose(space) = get_pose(space).inverse() * get_pose(viewer_space)
@ -120,13 +120,13 @@ impl XRReferenceSpace {
// = Translate(2) * viewer_pose
// assume approximate user height of 2 meters
let floor_to_eye: RigidTransform3D<f64> = Vector3D::new(0., 2., 0.).into();
let floor_to_eye: ApiRigidTransform = TypedVector3D::new(0., 2., 0.).into();
floor_to_eye.pre_mul(&viewer_pose)
},
XRReferenceSpaceType::Viewer => {
// This reference space follows the viewer around, so the viewer is
// always at an identity transform with respect to it
RigidTransform3D::identity()
TypedRigidTransform3D::identity()
},
_ => unimplemented!(),
}
@ -137,7 +137,7 @@ impl XRReferenceSpace {
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &WebVRFrameData) -> RigidTransform3D<f64> {
pub fn get_pose(&self, base_pose: &Frame) -> ApiPose {
let pose = self.get_unoffset_pose(base_pose);
// This may change, see https://github.com/immersive-web/webxr/issues/567
@ -148,21 +148,21 @@ impl XRReferenceSpace {
/// Gets pose represented by this space
///
/// Does not apply originOffset, use get_viewer_pose instead if you need it
pub fn get_unoffset_pose(&self, base_pose: &WebVRFrameData) -> RigidTransform3D<f64> {
pub fn get_unoffset_pose(&self, base_pose: &Frame) -> ApiPose {
match self.ty {
XRReferenceSpaceType::Local => {
// The eye-level pose is basically whatever the headset pose was at t=0, which
// for most devices is (0, 0, 0)
RigidTransform3D::identity()
TypedRigidTransform3D::identity()
},
XRReferenceSpaceType::Local_floor => {
// XXXManishearth support getting floor info from stage parameters
// Assume approximate height of 2m
// the floor-level space is 2m below the eye-level space, which is (0, 0, 0)
Vector3D::new(0., -2., 0.).into()
TypedVector3D::new(0., -2., 0.).into()
},
XRReferenceSpaceType::Viewer => XRSpace::pose_to_transform(&base_pose.pose),
XRReferenceSpaceType::Viewer => cast_transform(base_pose.transform),
_ => unimplemented!(),
}
}

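The local-floor branch above leans on a hard-coded guess while stage parameters are not wired up: the floor space sits 2 m below the eye-level space. A tiny worked example of what that means for a reported pose (plain numbers, no euclid):

```rust
fn main() {
    // Eye-level ("local") space: the headset pose at t = 0 is the origin.
    let viewer_y_in_local = 0.0_f32;

    // The floor space is assumed to be 2 m below the local space...
    let floor_origin_y_in_local = -2.0_f32;

    // ...so the same viewer shows up 2 m above the floor-space origin,
    // matching the `Translate(2) * viewer_pose` comment in the hunk above.
    let viewer_y_in_floor = viewer_y_in_local - floor_origin_y_in_local;
    assert_eq!(viewer_y_in_floor, 2.0);
    println!("viewer height above the assumed floor: {} m", viewer_y_in_floor);
}
```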
View file

@ -4,8 +4,7 @@
use crate::dom::bindings::codegen::Bindings::XRRenderStateBinding::{self, XRRenderStateMethods};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::reflector::Reflector;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrlayer::XRLayer;
@ -47,6 +46,25 @@ impl XRRenderState {
XRRenderStateBinding::Wrap,
)
}
pub fn clone_object(&self) -> DomRoot<Self> {
XRRenderState::new(
&self.global(),
self.depth_near.get(),
self.depth_far.get(),
self.layer.get().as_ref().map(|x| &**x),
)
}
pub fn set_depth_near(&self, depth: f64) {
self.depth_near.set(depth)
}
pub fn set_depth_far(&self, depth: f64) {
self.depth_far.set(depth)
}
pub fn set_layer(&self, layer: Option<&XRLayer>) {
self.layer.set(layer)
}
}
impl XRRenderStateMethods for XRRenderState {

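`clone_object` and the new setters exist so that `updateRenderState()` can edit a pending copy which only becomes the active state at the top of the next animation frame (step 1 of `raf_callback` in the `XRSession` changes below). A compact sketch of that double buffering with plain structs, reduced to the depth values:

```rust
// Sketch of the pending/active render-state double buffering: updateRenderState()
// edits a clone, and the clone is promoted at the start of the next animation
// frame, so rAF callbacks never observe a half-applied state.
#[derive(Clone, Debug)]
struct RenderState {
    depth_near: f64,
    depth_far: f64,
}

struct Session {
    active: RenderState,
    pending: Option<RenderState>,
}

impl Session {
    fn update_render_state(&mut self, depth_near: Option<f64>, depth_far: Option<f64>) {
        let pending = self.pending.get_or_insert_with(|| self.active.clone());
        if let Some(near) = depth_near {
            pending.depth_near = near;
        }
        if let Some(far) = depth_far {
            pending.depth_far = far;
        }
    }

    // Called at the top of each animation frame (step 1 of the spec's rAF algorithm).
    fn apply_pending(&mut self) {
        if let Some(pending) = self.pending.take() {
            self.active = pending;
        }
    }
}

fn main() {
    let mut session = Session {
        active: RenderState { depth_near: 0.1, depth_far: 1000.0 },
        pending: None,
    };
    session.update_render_state(Some(0.2), None);
    assert_eq!(session.active.depth_near, 0.1); // unchanged until the next frame
    session.apply_pending();
    assert_eq!(session.active.depth_near, 0.2);
    println!("{:?}", session.active);
}
```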
View file

@ -14,8 +14,9 @@ use crate::dom::dompointreadonly::DOMPointReadOnly;
use crate::dom::globalscope::GlobalScope;
use crate::dom::vrframedata::create_typed_array;
use crate::dom::window::Window;
use crate::dom::xrsession::ApiRigidTransform;
use dom_struct::dom_struct;
use euclid::{RigidTransform3D, Rotation3D, Vector3D};
use euclid::{TypedRigidTransform3D, TypedRotation3D, TypedVector3D};
use js::jsapi::{Heap, JSContext, JSObject};
use std::ptr::NonNull;
@ -25,14 +26,14 @@ pub struct XRRigidTransform {
position: MutNullableDom<DOMPointReadOnly>,
orientation: MutNullableDom<DOMPointReadOnly>,
#[ignore_malloc_size_of = "defined in euclid"]
transform: RigidTransform3D<f64>,
transform: ApiRigidTransform,
inverse: MutNullableDom<XRRigidTransform>,
#[ignore_malloc_size_of = "defined in mozjs"]
matrix: Heap<*mut JSObject>,
}
impl XRRigidTransform {
fn new_inherited(transform: RigidTransform3D<f64>) -> XRRigidTransform {
fn new_inherited(transform: ApiRigidTransform) -> XRRigidTransform {
XRRigidTransform {
reflector_: Reflector::new(),
position: MutNullableDom::default(),
@ -43,10 +44,7 @@ impl XRRigidTransform {
}
}
pub fn new(
global: &GlobalScope,
transform: RigidTransform3D<f64>,
) -> DomRoot<XRRigidTransform> {
pub fn new(global: &GlobalScope, transform: ApiRigidTransform) -> DomRoot<XRRigidTransform> {
reflect_dom_object(
Box::new(XRRigidTransform::new_inherited(transform)),
global,
@ -55,7 +53,7 @@ impl XRRigidTransform {
}
pub fn identity(window: &GlobalScope) -> DomRoot<XRRigidTransform> {
let transform = RigidTransform3D::identity();
let transform = TypedRigidTransform3D::identity();
XRRigidTransform::new(window, transform)
}
@ -72,14 +70,20 @@ impl XRRigidTransform {
)));
}
let translate = Vector3D::new(position.x as f64, position.y as f64, position.z as f64);
let rotate = Rotation3D::unit_quaternion(
orientation.x as f64,
orientation.y as f64,
orientation.z as f64,
orientation.w as f64,
let translate = TypedVector3D::new(position.x as f32, position.y as f32, position.z as f32);
let rotate = TypedRotation3D::unit_quaternion(
orientation.x as f32,
orientation.y as f32,
orientation.z as f32,
orientation.w as f32,
);
let transform = RigidTransform3D::new(rotate, translate);
if !rotate.i.is_finite() {
// if quaternion has zero norm, we'll get an infinite or NaN
// value for each element. This is preferable to checking for zero.
return Err(Error::InvalidState);
}
let transform = TypedRigidTransform3D::new(rotate, translate);
Ok(XRRigidTransform::new(&window.global(), transform))
}
}
@ -89,14 +93,20 @@ impl XRRigidTransformMethods for XRRigidTransform {
fn Position(&self) -> DomRoot<DOMPointReadOnly> {
self.position.or_init(|| {
let t = &self.transform.translation;
DOMPointReadOnly::new(&self.global(), t.x, t.y, t.z, 1.0)
DOMPointReadOnly::new(&self.global(), t.x.into(), t.y.into(), t.z.into(), 1.0)
})
}
// https://immersive-web.github.io/webxr/#dom-xrrigidtransform-orientation
fn Orientation(&self) -> DomRoot<DOMPointReadOnly> {
self.orientation.or_init(|| {
let r = &self.transform.rotation;
DOMPointReadOnly::new(&self.global(), r.i, r.j, r.k, r.r)
DOMPointReadOnly::new(
&self.global(),
r.i.into(),
r.j.into(),
r.k.into(),
r.r.into(),
)
})
}
// https://immersive-web.github.io/webxr/#dom-xrrigidtransform-inverse
@ -114,7 +124,7 @@ impl XRRigidTransformMethods for XRRigidTransform {
let cx = self.global().get_cx();
// According to the spec all matrices are column-major,
// however euclid uses row vectors so we use .to_row_major_array()
let arr = self.transform.to_transform().cast().to_row_major_array();
let arr = self.transform.to_transform().to_row_major_array();
create_typed_array(cx, &arr, &self.matrix);
}
NonNull::new(self.matrix.get()).unwrap()
@ -123,7 +133,7 @@ impl XRRigidTransformMethods for XRRigidTransform {
impl XRRigidTransform {
/// https://immersive-web.github.io/webxr/#dom-xrpose-transform
pub fn transform(&self) -> RigidTransform3D<f64> {
pub fn transform(&self) -> ApiRigidTransform {
self.transform
}
}

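The `.to_row_major_array()` call above looks backwards next to the spec's column-major requirement, but the code comment has it right: euclid 0.19 stores transforms in row-vector form, the transpose of the usual column-vector matrix, and flattening a transpose row-major yields the same 16 floats as flattening the original column-major. A quick check with a pure translation (hand-written array, no euclid):

```rust
fn main() {
    // Translation by (1, 2, 3) in euclid's row-vector convention: the
    // translation occupies the last *row* instead of the last column.
    let row_vector_matrix: [[f32; 4]; 4] = [
        [1.0, 0.0, 0.0, 0.0],
        [0.0, 1.0, 0.0, 0.0],
        [0.0, 0.0, 1.0, 0.0],
        [1.0, 2.0, 3.0, 1.0],
    ];

    // Flatten row by row, which is what to_row_major_array() does.
    let flat: Vec<f32> = row_vector_matrix.iter().flatten().copied().collect();

    // The translation ends up in elements 12..14 -- exactly where a column-major
    // dump of the usual column-vector matrix puts it, as XRRigidTransform.matrix
    // (and WebGL's uniformMatrix4fv) expects.
    assert_eq!(flat[12], 1.0);
    assert_eq!(flat[13], 2.0);
    assert_eq!(flat[14], 3.0);
    println!("{:?}", flat);
}
```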
View file

@ -3,68 +3,156 @@
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::compartments::InCompartment;
use crate::dom::bindings::codegen::Bindings::VRDisplayBinding::VRDisplayMethods;
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::XRBinding::XRSessionMode;
use crate::dom::bindings::codegen::Bindings::XRReferenceSpaceBinding::XRReferenceSpaceType;
use crate::dom::bindings::codegen::Bindings::XRRenderStateBinding::XRRenderStateInit;
use crate::dom::bindings::codegen::Bindings::XRRenderStateBinding::XRRenderStateMethods;
use crate::dom::bindings::codegen::Bindings::XRSessionBinding;
use crate::dom::bindings::codegen::Bindings::XRSessionBinding::XREnvironmentBlendMode;
use crate::dom::bindings::codegen::Bindings::XRSessionBinding::XRFrameRequestCallback;
use crate::dom::bindings::codegen::Bindings::XRSessionBinding::XRSessionMethods;
use crate::dom::bindings::codegen::Bindings::XRWebGLLayerBinding::XRWebGLLayerMethods;
use crate::dom::bindings::error::Error;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::root::{Dom, DomRoot, MutDom, MutNullableDom};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::dom::vrdisplay::VRDisplay;
use crate::dom::xrframe::XRFrame;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrlayer::XRLayer;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrrenderstate::XRRenderState;
use crate::dom::xrspace::XRSpace;
use crate::dom::xrwebgllayer::XRWebGLLayer;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use euclid::TypedRigidTransform3D;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::cell::Cell;
use std::mem;
use std::rc::Rc;
use webxr_api::{self, Frame, Session};
#[dom_struct]
pub struct XRSession {
eventtarget: EventTarget,
display: Dom<VRDisplay>,
base_layer: MutNullableDom<XRLayer>,
blend_mode: XREnvironmentBlendMode,
viewer_space: MutNullableDom<XRSpace>,
#[ignore_malloc_size_of = "defined in webxr"]
session: DomRefCell<Session>,
frame_requested: Cell<bool>,
pending_render_state: MutNullableDom<XRRenderState>,
active_render_state: MutDom<XRRenderState>,
next_raf_id: Cell<i32>,
#[ignore_malloc_size_of = "closures are hard"]
raf_callback_list: DomRefCell<Vec<(i32, Option<Rc<XRFrameRequestCallback>>)>>,
#[ignore_malloc_size_of = "defined in ipc-channel"]
raf_sender: DomRefCell<Option<IpcSender<(f64, Frame)>>>,
input_sources: DomRefCell<Vec<Dom<XRInputSource>>>,
}
impl XRSession {
fn new_inherited(display: &VRDisplay) -> XRSession {
fn new_inherited(session: Session, render_state: &XRRenderState) -> XRSession {
XRSession {
eventtarget: EventTarget::new_inherited(),
display: Dom::from_ref(display),
base_layer: Default::default(),
// we don't yet support any AR devices
blend_mode: XREnvironmentBlendMode::Opaque,
viewer_space: Default::default(),
session: DomRefCell::new(session),
frame_requested: Cell::new(false),
pending_render_state: MutNullableDom::new(None),
active_render_state: MutDom::new(render_state),
next_raf_id: Cell::new(0),
raf_callback_list: DomRefCell::new(vec![]),
raf_sender: DomRefCell::new(None),
input_sources: DomRefCell::new(vec![]),
}
}
pub fn new(global: &GlobalScope, display: &VRDisplay) -> DomRoot<XRSession> {
reflect_dom_object(
Box::new(XRSession::new_inherited(display)),
pub fn new(global: &GlobalScope, session: Session) -> DomRoot<XRSession> {
let render_state = XRRenderState::new(global, 0.1, 1000.0, None);
let ret = reflect_dom_object(
Box::new(XRSession::new_inherited(session, &render_state)),
global,
XRSessionBinding::Wrap,
)
);
{
let mut input_sources = ret.input_sources.borrow_mut();
for info in ret.session.borrow().initial_inputs() {
// XXXManishearth we should be able to listen for updates
// to the input sources
let input = XRInputSource::new(global, &ret, *info);
input_sources.push(Dom::from_ref(&input));
}
}
ret
}
pub fn xr_present(&self, p: Rc<Promise>) {
self.display.xr_present(self, None, Some(p));
pub fn with_session<F: FnOnce(&Session)>(&self, with: F) {
let session = self.session.borrow();
with(&session)
}
pub fn display(&self) -> &VRDisplay {
&self.display
/// https://immersive-web.github.io/webxr/#xr-animation-frame
fn raf_callback(&self, (time, frame): (f64, Frame)) {
// Step 1
if let Some(pending) = self.pending_render_state.take() {
// https://immersive-web.github.io/webxr/#apply-the-pending-render-state
// (Steps 1-4 are implicit)
// Step 5
self.active_render_state.set(&pending);
// Step 6-7: XXXManishearth handle inlineVerticalFieldOfView
// XXXManishearth handle inline sessions and composition disabled flag
let layer = pending.GetBaseLayer();
if let Some(layer) = layer {
let mut session = self.session.borrow_mut();
if let Some(layer) = layer.downcast::<XRWebGLLayer>() {
session.update_webgl_external_image_api(
layer.Context().webgl_sender().webxr_external_image_api(),
);
} else {
error!("updateRenderState() called with unknown layer type")
}
}
}
pub fn set_layer(&self, layer: &XRLayer) {
self.base_layer.set(Some(layer))
// Step 2
if self.active_render_state.get().GetBaseLayer().is_none() {
return;
}
// Step 3: XXXManishearth handle inline session
// Step 4-5
let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]);
let frame = XRFrame::new(&self.global(), self, frame);
// Step 6,7
frame.set_active(true);
frame.set_animation_frame(true);
// Step 8
for (_, callback) in callbacks.drain(..) {
if let Some(callback) = callback {
let _ = callback.Call__(Finite::wrap(time), &frame, ExceptionHandling::Report);
}
}
// Step 9: XXXManishearth unset `active` bool on `frame`
self.session.borrow_mut().render_animation_frame();
}
}
@ -76,30 +164,93 @@ impl XRSessionMethods for XRSession {
// https://immersive-web.github.io/webxr/#dom-xrsession-renderstate
fn RenderState(&self) -> DomRoot<XRRenderState> {
// XXXManishearth maybe cache this
XRRenderState::new(
&self.global(),
*self.display.DepthNear(),
*self.display.DepthFar(),
self.base_layer.get().as_ref().map(|l| &**l),
)
self.active_render_state.get()
}
/// https://immersive-web.github.io/webxr/#dom-xrsession-requestanimationframe
fn UpdateRenderState(&self, init: &XRRenderStateInit, comp: InCompartment) -> Rc<Promise> {
let p = Promise::new_in_current_compartment(&self.global(), comp);
self.display.queue_renderstate(init, p.clone());
p
/// https://immersive-web.github.io/webxr/#dom-xrsession-updaterenderstate
fn UpdateRenderState(&self, init: &XRRenderStateInit, _: InCompartment) {
// XXXManishearth various checks:
// If session's ended value is true, throw an InvalidStateError and abort these steps
// If newState's baseLayer was created with an XRSession other than session,
// throw an InvalidStateError and abort these steps
// If newState's inlineVerticalFieldOfView is set and session is an
// immersive session, throw an InvalidStateError and abort these steps.
let pending = self
.pending_render_state
.or_init(|| self.active_render_state.get().clone_object());
if let Some(near) = init.depthNear {
pending.set_depth_near(*near);
}
if let Some(far) = init.depthFar {
pending.set_depth_far(*far);
}
if let Some(ref layer) = init.baseLayer {
pending.set_layer(Some(&layer))
}
// XXXManishearth handle inlineVerticalFieldOfView
}
/// https://immersive-web.github.io/webxr/#dom-xrsession-requestanimationframe
fn RequestAnimationFrame(&self, callback: Rc<XRFrameRequestCallback>) -> i32 {
self.display.xr_raf(callback) as i32
#[derive(serde::Serialize, serde::Deserialize)]
pub struct FrameCallback {
sender: IpcSender<(f64, Frame)>,
}
#[typetag::serde]
impl webxr_api::FrameRequestCallback for FrameCallback {
fn callback(&mut self, time: f64, frame: Frame) {
let _ = self.sender.send((time, frame));
}
}
// queue up RAF callback, obtain ID
let raf_id = self.next_raf_id.get();
self.next_raf_id.set(raf_id + 1);
self.raf_callback_list
.borrow_mut()
.push((raf_id, Some(callback)));
// set up listener for response, if necessary
if self.raf_sender.borrow().is_none() {
let this = Trusted::new(self);
let global = self.global();
let window = global.as_window();
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
*self.raf_sender.borrow_mut() = Some(sender);
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let this = this.clone();
let _ = task_source.queue_with_canceller(
task!(xr_raf_callback: move || {
this.root().raf_callback(message.to().unwrap());
}),
&canceller,
);
}),
);
}
let sender = self.raf_sender.borrow().clone().unwrap();
// request animation frame
self.session
.borrow_mut()
.request_animation_frame(FrameCallback { sender });
raf_id
}
/// https://immersive-web.github.io/webxr/#dom-xrsession-cancelanimationframe
fn CancelAnimationFrame(&self, frame: i32) {
self.display.xr_cancel_raf(frame)
let mut list = self.raf_callback_list.borrow_mut();
if let Some(pair) = list.iter_mut().find(|pair| pair.0 == frame) {
pair.1 = None;
}
}
/// https://immersive-web.github.io/webxr/#dom-xrsession-environmentblendmode
@ -131,6 +282,26 @@ impl XRSessionMethods for XRSession {
/// https://immersive-web.github.io/webxr/#dom-xrsession-getinputsources
fn GetInputSources(&self) -> Vec<DomRoot<XRInputSource>> {
self.display.get_input_sources()
self.input_sources
.borrow()
.iter()
.map(|x| DomRoot::from_ref(&**x))
.collect()
}
}
#[derive(Clone, Copy, Debug)]
pub struct ApiSpace;
// The pose of an object in native-space. Should never be exposed.
pub type ApiPose = TypedRigidTransform3D<f32, ApiSpace, webxr_api::Native>;
// The pose of the viewer in some api-space.
pub type ApiViewerPose = TypedRigidTransform3D<f32, webxr_api::Viewer, ApiSpace>;
// A transform between objects in some API-space
pub type ApiRigidTransform = TypedRigidTransform3D<f32, ApiSpace, ApiSpace>;
#[allow(unsafe_code)]
pub fn cast_transform<T, U, V, W>(
transform: TypedRigidTransform3D<f32, T, U>,
) -> TypedRigidTransform3D<f32, V, W> {
unsafe { mem::transmute(transform) }
}

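The `ApiSpace`/`ApiPose`/`ApiViewerPose` aliases at the end of the hunk use euclid's phantom space parameters so the compiler tracks which coordinate space a pose lives in, and `cast_transform` is the single escape hatch (a transmute that only swaps those phantom parameters). A toy illustration of the idea with a translation-only pose type instead of `TypedRigidTransform3D`:

```rust
use std::marker::PhantomData;

// Zero-sized "space" tags; they never exist at runtime.
struct Native;
struct Viewer;
struct ApiSpace;

// A toy rigid transform "from Src into Dst" (translation only, for brevity).
struct Pose<Src, Dst> {
    x: f32,
    y: f32,
    z: f32,
    _spaces: PhantomData<(Src, Dst)>,
}

impl<Src, Dst> Pose<Src, Dst> {
    fn new(x: f32, y: f32, z: f32) -> Self {
        Pose { x, y, z, _spaces: PhantomData }
    }
    // Composition only type-checks when the intermediate space lines up.
    fn pre_mul<Earlier>(&self, other: &Pose<Earlier, Src>) -> Pose<Earlier, Dst> {
        Pose::new(self.x + other.x, self.y + other.y, self.z + other.z)
    }
}

fn main() {
    let viewer_in_native: Pose<Viewer, Native> = Pose::new(0.0, 1.6, 0.0);
    let native_in_api: Pose<Native, ApiSpace> = Pose::new(0.0, -1.6, 0.0);

    // Viewer -> Native -> ApiSpace composes fine...
    let viewer_in_api = native_in_api.pre_mul(&viewer_in_native);
    println!(
        "viewer in api space: ({}, {}, {})",
        viewer_in_api.x, viewer_in_api.y, viewer_in_api.z
    );
    // ...while swapping the two arguments would not compile, which is the point.
}
```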
View file

@ -10,10 +10,9 @@ use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrsession::XRSession;
use crate::dom::xrsession::{cast_transform, ApiPose, XRSession};
use dom_struct::dom_struct;
use euclid::{RigidTransform3D, Rotation3D, Vector3D};
use webvr_traits::{WebVRFrameData, WebVRPose};
use webxr_api::Frame;
#[dom_struct]
pub struct XRSpace {
@ -58,30 +57,27 @@ impl XRSpace {
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &WebVRFrameData) -> RigidTransform3D<f64> {
pub fn get_pose(&self, base_pose: &Frame) -> ApiPose {
if let Some(reference) = self.downcast::<XRReferenceSpace>() {
reference.get_pose(base_pose)
} else if let Some(source) = self.input_source.get() {
XRSpace::pose_to_transform(&source.pose())
// XXXManishearth we should be able to request frame information
// for inputs when necessary instead of always loading it
//
// Also, the below code is quadratic, so this API may need an overhaul anyway
let id = source.id();
// XXXManishearth once we have dynamic inputs we'll need to handle this better
let frame = base_pose
.inputs
.iter()
.find(|i| i.id == id)
.expect("no input found");
cast_transform(frame.target_ray_origin)
} else {
unreachable!()
}
}
pub fn pose_to_transform(pose: &WebVRPose) -> RigidTransform3D<f64> {
let pos = pose.position.unwrap_or([0., 0., 0.]);
let translation = Vector3D::new(pos[0] as f64, pos[1] as f64, pos[2] as f64);
let orient = pose.orientation.unwrap_or([0., 0., 0., 0.]);
let rotation = Rotation3D::quaternion(
orient[0] as f64,
orient[1] as f64,
orient[2] as f64,
orient[3] as f64,
)
.normalize();
RigidTransform3D::new(rotation, translation)
}
pub fn session(&self) -> &XRSession {
&self.session
}
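
As the comment in `get_pose` notes, each input-source lookup scans `frame.inputs` linearly, so resolving every source in a frame is quadratic overall. One way that could be flattened, sketched with hypothetical id/pose types rather than the actual webxr_api structs, is to index the frame's inputs once per frame:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for webxr_api's input id and pose types.
type InputId = u32;

#[derive(Clone, Copy, Debug)]
struct TargetRayOrigin([f32; 3]);

struct InputFrame {
    id: InputId,
    target_ray_origin: TargetRayOrigin,
}

// Build the index once when the frame arrives; each space then does an O(1)
// lookup instead of re-scanning the whole input list.
fn index_inputs(inputs: &[InputFrame]) -> HashMap<InputId, TargetRayOrigin> {
    inputs
        .iter()
        .map(|i| (i.id, i.target_ray_origin))
        .collect()
}
```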

View file

@ -9,15 +9,21 @@
use crate::dom::bindings::codegen::Bindings::XRTestBinding::{
self, FakeXRDeviceInit, XRTestMethods,
};
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice};
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use euclid::TypedRigidTransform3D;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::cell::Cell;
use std::rc::Rc;
use webvr_traits::{MockVRInit, WebVRMsg};
use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg};
#[dom_struct]
pub struct XRTest {
@ -40,11 +46,39 @@ impl XRTest {
XRTestBinding::Wrap,
)
}
fn device_obtained(
&self,
response: Result<IpcSender<MockDeviceMsg>, XRError>,
trusted: TrustedPromise,
) {
let promise = trusted.root();
if let Ok(sender) = response {
let device = FakeXRDevice::new(&self.global(), sender);
promise.resolve_native(&device);
} else {
promise.reject_native(&());
}
}
}
impl XRTestMethods for XRTest {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> {
#[derive(serde::Serialize, serde::Deserialize)]
pub struct MockDevice {
sender: IpcSender<Result<IpcSender<MockDeviceMsg>, XRError>>,
}
#[typetag::serde]
impl webxr_api::MockDeviceCallback for MockDevice {
fn callback(&mut self, result: Result<IpcSender<MockDeviceMsg>, XRError>) {
self.sender
.send(result)
.expect("mock device callback failed");
}
}
let p = Promise::new(&self.global());
if !init.supportsImmersive || self.session_started.get() {
@ -61,7 +95,19 @@ impl XRTestMethods for XRTest {
},
}
} else {
Default::default()
TypedRigidTransform3D::identity()
};
let floor_origin = if let Some(ref o) = init.floorOrigin {
match get_origin(&o) {
Ok(origin) => origin,
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
TypedRigidTransform3D::identity()
};
let views = match get_views(&init.views) {
@ -72,20 +118,47 @@ impl XRTestMethods for XRTest {
},
};
let init = MockVRInit {
viewer_origin: Some(origin),
views: Some(views),
eye_level: None,
let init = MockDeviceInit {
viewer_origin: origin,
views,
supports_immersive: init.supportsImmersive,
supports_unbounded: init.supportsUnbounded,
floor_origin,
};
self.session_started.set(true);
self.global()
.as_window()
.webvr_thread()
.unwrap()
.send(WebVRMsg::CreateMockDisplay(init))
.unwrap();
p.resolve_native(&FakeXRDevice::new(&self.global()));
let global = self.global();
let window = global.as_window();
let this = Trusted::new(self);
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let trusted = trusted
.take()
.expect("SimulateDeviceConnection callback called twice");
let this = this.clone();
let message = message
.to()
.expect("SimulateDeviceConnection callback given incorrect payload");
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().device_obtained(message, trusted);
}),
&canceller,
);
}),
);
window
.webxr_registry()
.simulate_device_connection(init, MockDevice { sender });
p
}
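
The shape of the mock-device flow is: hand the webxr registry an IPC sender, route the one-shot reply through ROUTER onto the DOM-manipulation task source, and resolve or reject the promise there. A stripped-down sketch of that one-shot round trip, with `std::sync::mpsc` and a plain thread standing in for ipc-channel and the webxr backend:

```rust
use std::sync::mpsc;
use std::thread;

// Stand-ins for the mock device handle and webxr_api::Error.
#[derive(Debug)]
struct MockDeviceHandle;
#[derive(Debug)]
struct XrError;

fn main() {
    // Mirrors the IpcSender<Result<IpcSender<MockDeviceMsg>, XRError>> handed
    // to the registry (std::sync::mpsc stands in for ipc-channel here).
    let (sender, receiver) = mpsc::channel::<Result<MockDeviceHandle, XrError>>();

    thread::spawn(move || {
        // The backend resolves the request exactly once.
        let _ = sender.send(Ok(MockDeviceHandle));
    });

    // The "router" side takes the payload once and resolves or rejects.
    match receiver.recv().expect("mock device callback never fired") {
        Ok(handle) => println!("device obtained: {:?}", handle),
        Err(e) => println!("device connection failed: {:?}", e),
    }
}
```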

View file

@ -9,12 +9,11 @@ use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::globalscope::GlobalScope;
use crate::dom::vrframedata::create_typed_array;
use crate::dom::xrrigidtransform::XRRigidTransform;
use crate::dom::xrsession::XRSession;
use crate::dom::xrsession::{cast_transform, ApiViewerPose, XRSession};
use dom_struct::dom_struct;
use euclid::{RigidTransform3D, Vector3D};
use js::jsapi::{Heap, JSContext, JSObject};
use std::ptr::NonNull;
use webvr_traits::WebVRFrameData;
use webxr_api::View;
#[dom_struct]
pub struct XRView {
@ -41,31 +40,21 @@ impl XRView {
}
#[allow(unsafe_code)]
pub fn new(
pub fn new<V: Copy>(
global: &GlobalScope,
session: &XRSession,
view: &View<V>,
eye: XREye,
pose: &RigidTransform3D<f64>,
data: &WebVRFrameData,
pose: &ApiViewerPose,
) -> DomRoot<XRView> {
let vr_display = session.display();
// XXXManishearth compute and cache projection matrices on the Display
let (proj, offset) = if eye == XREye::Left {
(
&data.left_projection_matrix,
vr_display.left_eye_params_offset(),
)
} else {
(
&data.right_projection_matrix,
vr_display.right_eye_params_offset(),
)
};
let offset = Vector3D::new(offset[0] as f64, offset[1] as f64, offset[2] as f64);
let transform = pose.post_mul(&offset.into());
let transform = XRRigidTransform::new(global, transform);
// this transform is the pose of the viewer in the eye space, i.e. it is the transform
// from the viewer space to the eye space. We invert it to get the pose of the eye in the viewer space.
let offset = view.transform.inverse();
let transform = pose.pre_mul(&offset);
let transform = XRRigidTransform::new(global, cast_transform(transform));
let ret = reflect_dom_object(
Box::new(XRView::new_inherited(session, &transform, eye)),
@ -73,9 +62,11 @@ impl XRView {
XRViewBinding::Wrap,
);
// row_major since euclid uses row vectors
let proj = view.projection.to_row_major_array();
let cx = global.get_cx();
unsafe {
create_typed_array(cx, proj, &ret.proj);
create_typed_array(cx, &proj, &ret.proj);
}
ret
}
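
The inline comment above is the key step: `view.transform` maps viewer space to eye space, so its inverse is the eye's pose in viewer space, and composing that with the viewer pose yields the eye's pose in API space. A translation-only sketch of that composition, with hypothetical space tags and a toy transform type:

```rust
use std::marker::PhantomData;

// Hypothetical space tags and a translation-only "rigid transform"; the
// phantom parameters mirror euclid's source/destination spaces.
struct Eye;
struct Viewer;
struct Api;

struct Transform<Src, Dst>([f32; 3], PhantomData<(Src, Dst)>);

impl<Src, Dst> Transform<Src, Dst> {
    fn new(t: [f32; 3]) -> Self {
        Transform(t, PhantomData)
    }
    fn inverse(self) -> Transform<Dst, Src> {
        Transform::new([-self.0[0], -self.0[1], -self.0[2]])
    }
    // Apply `self` first, then `next`; the phantom types only line up when
    // the intermediate space matches, which catches ordering mistakes.
    fn then<Next>(self, next: Transform<Dst, Next>) -> Transform<Src, Next> {
        Transform::new([
            self.0[0] + next.0[0],
            self.0[1] + next.0[1],
            self.0[2] + next.0[2],
        ])
    }
}

fn main() {
    // view.transform: viewer -> eye, e.g. the left eye 3cm off-center.
    let viewer_to_eye: Transform<Viewer, Eye> = Transform::new([-0.03, 0.0, 0.0]);
    // The viewer pose: viewer -> api space.
    let viewer_pose: Transform<Viewer, Api> = Transform::new([1.0, 1.6, 0.0]);
    // Invert the per-eye offset and compose eye -> viewer -> api; this is the
    // composition that `pose.pre_mul(&offset)` computes above.
    let eye_pose: Transform<Eye, Api> = viewer_to_eye.inverse().then(viewer_pose);
    println!("eye pose translation: {:?}", eye_pose.0);
}
```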

View file

@ -10,14 +10,13 @@ use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrpose::XRPose;
use crate::dom::xrrigidtransform::XRRigidTransform;
use crate::dom::xrsession::XRSession;
use crate::dom::xrsession::{cast_transform, ApiViewerPose, XRSession};
use crate::dom::xrview::XRView;
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
use js::conversions::ToJSValConvertible;
use js::jsapi::{Heap, JSContext};
use js::jsval::{JSVal, UndefinedValue};
use webvr_traits::WebVRFrameData;
use webxr_api::Views;
#[dom_struct]
pub struct XRViewerPose {
@ -38,12 +37,19 @@ impl XRViewerPose {
pub fn new(
global: &GlobalScope,
session: &XRSession,
pose: RigidTransform3D<f64>,
data: &WebVRFrameData,
pose: ApiViewerPose,
) -> DomRoot<XRViewerPose> {
let left = XRView::new(global, session, XREye::Left, &pose, &data);
let right = XRView::new(global, session, XREye::Right, &pose, &data);
let transform = XRRigidTransform::new(global, pose);
rooted_vec!(let mut views);
session.with_session(|s| match s.views() {
Views::Mono(view) => {
views.push(XRView::new(global, session, &view, XREye::Unknown, &pose))
},
Views::Stereo(left, right) => {
views.push(XRView::new(global, session, &left, XREye::Left, &pose));
views.push(XRView::new(global, session, &right, XREye::Right, &pose));
},
});
let transform = XRRigidTransform::new(global, cast_transform(pose));
let pose = reflect_dom_object(
Box::new(XRViewerPose::new_inherited(&transform)),
global,
@ -53,8 +59,7 @@ impl XRViewerPose {
unsafe {
let cx = global.get_cx();
rooted!(in(cx) let mut jsval = UndefinedValue());
let vec = vec![left, right];
vec.to_jsval(cx, jsval.handle_mut());
views.to_jsval(cx, jsval.handle_mut());
pose.views.set(jsval.get());
}

View file

@ -2975,6 +2975,7 @@ impl ScriptThread {
incomplete.navigation_start_precise,
self.webgl_chan.as_ref().map(|chan| chan.channel()),
self.webvr_chan.clone(),
self.webxr_registry.clone(),
self.microtask_queue.clone(),
self.webrender_document,
self.webrender_api_sender.clone(),

View file

@ -60,7 +60,7 @@ rust-webvr = { version = "0.13", features = ["glwindow"] }
servo-media = {git = "https://github.com/servo/media"}
tinyfiledialogs = "3.0"
webxr-api = { git = "https://github.com/servo/webxr", features = ["ipc"] }
webxr = { git = "https://github.com/servo/webxr", features = ["ipc", "glwindow"] }
webxr = { git = "https://github.com/servo/webxr", features = ["ipc", "glwindow", "headless"] }
[target.'cfg(any(target_os = "linux", target_os = "windows"))'.dependencies]
image = "0.21"

View file

@ -73,8 +73,10 @@ impl EmbedderMethods for EmbedderCallbacks {
}
fn register_webxr(&mut self, xr: &mut webxr_api::MainThreadRegistry) {
if !opts::get().headless {
if pref!(dom.webxr.test) {
let gl = self.gl.clone();
xr.register_mock(webxr::headless::HeadlessMockDiscovery::new(gl));
} else if !opts::get().headless && pref!(dom.webxr.glwindow) {
warn!("Creating test XR device");
let gl = self.gl.clone();
let events_loop_clone = self.events_loop.clone();
@ -86,5 +88,4 @@ impl EmbedderMethods for EmbedderCallbacks {
xr.register(discovery);
}
}
}
}

View file

@ -31,6 +31,7 @@
"dom.webvr.event_polling_interval": 500,
"dom.webvr.test": false,
"dom.webxr.enabled": false,
"dom.webxr.glwindow": false,
"dom.webxr.test": false,
"dom.worklet.timeout_ms": 10,
"gfx.subpixel-text-antialiasing.enabled": true,

View file

@ -19687,11 +19687,11 @@
"testharness"
],
"webxr/obtain_frame.html": [
"e2b4424d5779baedf6bdb50f1b3151336f31a4cb",
"063008c7ebc0df9997b8286296b4f7fe4663b331",
"testharness"
],
"webxr/resources/webxr-util.js": [
"554c1c183d3710e54dc60704dad0aac542ffd67c",
"f0c166e097271fd6a2709428fab2ccffea1eb08a",
"support"
]
},

View file

@ -54,9 +54,9 @@
pose = frame.getViewerPose(offset);
for (view of pose.views) {
if (view.eye == "left") {
assert_matrix_approx_equals(view.transform.matrix, [-1/3,-2/3,2/3,0,-2/3,2/3,1/3,0,-2/3,-1/3,-2/3,0,3.4,-1.9,-0.9,1], 0.001, "left offset transform");
assert_matrix_approx_equals(view.transform.matrix, [-1/3,-2/3,2/3,0,-2/3,2/3,1/3,0,-2/3,-1/3,-2/3,0,3.5 + 1/30,-1.9 + 2/30,-0.9 - 2/30,1], 0.001, "left offset transform");
} else if (view.eye == "right") {
assert_matrix_approx_equals(view.transform.matrix, [-1/3,-2/3,2/3,0,-2/3,2/3,1/3,0,-2/3,-1/3,-2/3,0,3.6,-1.9,-0.9,1], 0.001, "right offset transform");
assert_matrix_approx_equals(view.transform.matrix, [-1/3,-2/3,2/3,0,-2/3,2/3,1/3,0,-2/3,-1/3,-2/3,0,3.5 - 1/30,-1.9 - 2/30,-0.9 + 2/30,1], 0.001, "right offset transform");
} else {
throw "got unknown view";
}
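
The updated expectations encode the fact that the eye offsets are now applied in viewer space, i.e. rotated by the viewer's orientation before being added to its translation. A quick check of the left-eye numbers, assuming (as the expected values imply; the viewer origin itself is set up elsewhere in the test) a viewer translation of (3.5, -1.9, -0.9) and the column-major rotation from the matrix literal:

```rust
fn main() {
    // First three columns of the expected column-major matrix.
    let r = [
        [-1.0 / 3.0, -2.0 / 3.0, 2.0 / 3.0],
        [-2.0 / 3.0, 2.0 / 3.0, 1.0 / 3.0],
        [-2.0 / 3.0, -1.0 / 3.0, -2.0 / 3.0],
    ];
    // Viewer translation implied by the expectations (hypothetical here) and
    // the left-eye offset from webxr-util.js.
    let t = [3.5_f64, -1.9, -0.9];
    let offset = [-0.1_f64, 0.0, 0.0];

    // translation column = t + R * offset, summing over the stored columns.
    let mut out = t;
    for col in 0..3 {
        for row in 0..3 {
            out[row] += r[col][row] * offset[col];
        }
    }
    // Prints roughly [3.533, -1.833, -0.967], i.e. 3.5 + 1/30, -1.9 + 2/30,
    // -0.9 - 2/30, matching the asserted left offset transform.
    println!("{:?}", out);
}
```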

View file

@ -1,10 +1,9 @@
// pieced together from various things in wpt/webxr/resources
const VALID_PROJECTION_MATRIX = [1, 0, 0, 0, 0, 1, 0, 0, 3, 2, -1, -1, 0, 0, -0.2, 0];
const LEFT_OFFSET = {position: [-0.1, 0, 0], orientation: [0,0,0,0]};
const RIGHT_OFFSET = {position: [0.1, 0, 0], orientation: [0,0,0,0]};
const LEFT_VIEWPORT = {x: 0, y: 0, width: 320, height: 480};
const RIGHT_VIEWPORT = {x: 320, y: 0, width: 320, height: 480};
const LEFT_OFFSET = {position: [-0.1, 0, 0], orientation: [0,0,0,1]};
const RIGHT_OFFSET = {position: [0.1, 0, 0], orientation: [0,0,0,1]};
const RESOLUTION = {width: 320, height: 480};
let assert_matrix_approx_equals = function(m1, m2, epsilon, prefix = "") {
assert_equals(m1.length, m2.length, prefix + "Matrix lengths should match");
for(var i = 0; i < m1.length; ++i) {
@ -13,6 +12,6 @@ let assert_matrix_approx_equals = function(m1, m2, epsilon, prefix = "") {
}
const TEST_VIEWS = [
{eye: "left", projectionMatrix: VALID_PROJECTION_MATRIX, viewOffset: LEFT_OFFSET, viewport: LEFT_VIEWPORT},
{eye: "right", projectionMatrix: VALID_PROJECTION_MATRIX, viewOffset: RIGHT_OFFSET, viewport: RIGHT_VIEWPORT}
{eye: "left", projectionMatrix: VALID_PROJECTION_MATRIX, viewOffset: LEFT_OFFSET, resolution: RESOLUTION},
{eye: "right", projectionMatrix: VALID_PROJECTION_MATRIX, viewOffset: RIGHT_OFFSET, resolution: RESOLUTION}
];