Auto merge of #14618 - MortimerGoro:webvr_api, r=larsbergstrom,emilio,jdm,nox,asajeffrey,cvan

WebVR API Implementation


WebVR API implementation with HTC Vive support on Windows. The current implementation only enables WebVR support on Windows. On other platforms the API is exposed to JavaScript, but navigator.vr.getDisplays() returns an empty array. This will change as we add support for more VR providers and platforms ;)

Info about the architecture:
https://blog.mozvr.com/webvr-servo-architecture-and-latency-optimizations/
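For context, here is a minimal sketch (not part of this PR) of how a page would consume the `navigator.vr` entry point added here; the page-side variable names are illustrative:

```js
// Feature-detect the navigator.vr entry point (gated behind the dom.webvr.enabled pref).
if (navigator.vr) {
  navigator.vr.getDisplays().then(displays => {
    if (displays.length === 0) {
      // On platforms without a VR provider this currently resolves with an empty array.
      console.log("No VRDisplay found");
      return;
    }
    const display = displays[0];
    console.log("Found", display.displayName, "canPresent:", display.capabilities.canPresent);
  }).catch(err => console.log("WebVR not available:", err));
}
```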
---
- [X] `./mach build -d` does not report any errors
- [X] `./mach test-tidy` does not report any errors
- [ ] These changes fix #__ (github issue number if applicable).

- [X] There are tests for these changes OR
- [ ] These changes do not require tests because _____

To test on an HTC Vive, the proprietary openvr.dll must be copied next to servo.exe (https://github.com/ValveSoftware/openvr/tree/master/bin/win64). I have added some of the official WebVR samples for testing. Switch on your headset and run:

mach run tests/html/webvr/room-scale.html
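
The samples follow the usual WebVR render loop. As a rough sketch of what they exercise against the methods implemented in this PR (the WebGL `canvas` and the `drawScene` helper are assumed, not part of this change):

```js
// Assumes `canvas` holds a WebGL context and `drawScene(frameData)` renders both eyes.
const frameData = new VRFrameData();

function enterVR(display) {
  display.requestPresent([{ source: canvas }]).then(() => {
    function onVRFrame() {
      display.requestAnimationFrame(onVRFrame);
      display.getFrameData(frameData); // sync pose/view/projection matrices for this frame
      drawScene(frameData);            // render the left and right eye viewports
      display.submitFrame();           // hand the rendered frame to the VR compositor
    }
    display.requestAnimationFrame(onVRFrame);
  });
}
```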

bors-servo 2017-01-09 06:39:45 -08:00 committed by GitHub
commit 518ef39cfd
70 changed files with 13044 additions and 20 deletions

View file

@ -211,7 +211,8 @@ impl<'a> CanvasPaintThread<'a> {
}
}
}
CanvasMsg::WebGL(_) => panic!("Wrong message sent to Canvas2D thread"),
CanvasMsg::WebGL(_) => panic!("Wrong WebGL message sent to Canvas2D thread"),
CanvasMsg::WebVR(_) => panic!("Wrong WebVR message sent to Canvas2D thread"),
}
}
}).expect("Thread spawning failed");

View file

@ -144,6 +144,18 @@ impl WebGLPaintThread {
}
}
fn handle_webvr_message(&self, message: webrender_traits::VRCompositorCommand) {
match self.data {
WebGLPaintTaskData::WebRender(ref api, id) => {
api.send_vr_compositor_command(id, message);
}
WebGLPaintTaskData::Readback(..) => {
error!("Webrender is required for WebVR implementation");
}
}
}
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
@ -190,6 +202,7 @@ impl WebGLPaintThread {
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
CanvasMsg::WebVR(message) => painter.handle_webvr_message(message)
}
}
}).expect("Thread spawning failed");

View file

@ -27,7 +27,7 @@ use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use std::default::Default;
use std::str::FromStr;
use webrender_traits::{WebGLCommand, WebGLContextId};
use webrender_traits::{WebGLCommand, WebGLContextId, VRCompositorCommand};
#[derive(Clone, Deserialize, Serialize)]
pub enum FillRule {
@ -42,6 +42,7 @@ pub enum CanvasMsg {
FromLayout(FromLayoutMsg),
FromScript(FromScriptMsg),
WebGL(WebGLCommand),
WebVR(VRCompositorCommand)
}
#[derive(Clone, Deserialize, Serialize)]

View file

@ -259,4 +259,8 @@ impl Preferences {
pub fn extend(&self, extension: HashMap<String, Pref>) {
self.0.write().unwrap().extend(extension);
}
pub fn is_webvr_enabled(&self) -> bool {
self.get("dom.webvr.enabled").as_boolean().unwrap_or(false)
}
}

View file

@ -36,6 +36,7 @@ servo_config = {path = "../config", features = ["servo"]}
servo_rand = {path = "../rand"}
servo_remutex = {path = "../remutex"}
servo_url = {path = "../url", features = ["servo"]}
webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"

View file

@ -101,6 +101,7 @@ use script_traits::{LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, Scri
use script_traits::{LogEntry, ServiceWorkerMsg, webdriver_msg};
use script_traits::{MozBrowserErrorType, MozBrowserEvent, WebDriverCommandMsg, WindowSizeData};
use script_traits::{SWManagerMsg, ScopeThings, WindowSizeType};
use script_traits::WebVREventMsg;
use servo_config::opts;
use servo_config::prefs::PREFS;
use servo_rand::{Rng, SeedableRng, ServoRng, random};
@ -122,6 +123,7 @@ use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use timer_scheduler::TimerScheduler;
use webrender_traits;
use webvr_traits::WebVRMsg;
/// The `Constellation` itself. In the servo browser, there is one
/// constellation, which maintains all of the browser global data.
@ -280,6 +282,9 @@ pub struct Constellation<Message, LTF, STF> {
/// Phantom data that keeps the Rust type system happy.
phantom: PhantomData<(Message, LTF, STF)>,
/// A channel through which messages can be sent to the webvr thread.
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// State needed to construct a constellation.
@ -535,6 +540,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
info!("Using seed {} for random pipeline closure.", seed);
(rng, prob)
}),
webvr_thread: None
};
constellation.run();
@ -645,6 +651,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
prev_visibility: prev_visibility,
webrender_api_sender: self.webrender_api_sender.clone(),
is_private: is_private,
webvr_thread: self.webvr_thread.clone()
});
let pipeline = match result {
@ -879,6 +886,14 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
FromCompositorMsg::LogEntry(top_level_frame_id, thread_name, entry) => {
self.handle_log_entry(top_level_frame_id, thread_name, entry);
}
FromCompositorMsg::SetWebVRThread(webvr_thread) => {
assert!(self.webvr_thread.is_none());
self.webvr_thread = Some(webvr_thread)
}
FromCompositorMsg::WebVREvent(pipeline_ids, event) => {
debug!("constellation got WebVR event");
self.handle_webvr_event(pipeline_ids, event);
}
}
}
@ -1186,6 +1201,13 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
if let Some(chan) = self.webvr_thread.as_ref() {
debug!("Exiting WebVR thread.");
if let Err(e) = chan.send(WebVRMsg::Exit) {
warn!("Exit WebVR thread failed ({})", e);
}
}
debug!("Exiting font cache thread.");
self.font_cache_thread.exit();
@ -1274,6 +1296,18 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
fn handle_webvr_event(&mut self, ids: Vec<PipelineId>, event: WebVREventMsg) {
for id in ids {
match self.pipelines.get_mut(&id) {
Some(ref pipeline) => {
// Notify script thread
let _ = pipeline.event_loop.send(ConstellationControlMsg::WebVREvent(id, event.clone()));
},
None => warn!("constellation got webvr event for dead pipeline")
}
}
}
fn handle_init_load(&mut self, url: ServoUrl) {
let window_size = self.window_size.visible_viewport;
let root_pipeline_id = PipelineId::new();

View file

@ -41,6 +41,7 @@ extern crate servo_remutex;
extern crate servo_url;
extern crate style_traits;
extern crate webrender_traits;
extern crate webvr_traits;
mod constellation;
mod event_loop;

View file

@ -36,6 +36,7 @@ use std::rc::Rc;
use std::sync::mpsc::Sender;
use style_traits::{PagePx, ViewportPx};
use webrender_traits;
use webvr_traits::WebVRMsg;
/// A `Pipeline` is the constellation's view of a `Document`. Each pipeline has an
/// event loop (executed by a script thread) and a layout thread. A script thread
@ -169,6 +170,8 @@ pub struct InitialPipelineState {
/// Whether this pipeline is considered private.
pub is_private: bool,
/// A channel to the webvr thread.
pub webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl Pipeline {
@ -268,6 +271,7 @@ impl Pipeline {
script_content_process_shutdown_chan: script_content_process_shutdown_chan,
script_content_process_shutdown_port: script_content_process_shutdown_port,
webrender_api_sender: state.webrender_api_sender,
webvr_thread: state.webvr_thread,
};
// Spawn the child process.
@ -470,6 +474,7 @@ pub struct UnprivilegedPipelineContent {
script_content_process_shutdown_chan: IpcSender<()>,
script_content_process_shutdown_port: IpcReceiver<()>,
webrender_api_sender: webrender_traits::RenderApiSender,
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl UnprivilegedPipelineContent {
@ -496,6 +501,7 @@ impl UnprivilegedPipelineContent {
window_size: self.window_size,
pipeline_namespace_id: self.pipeline_namespace_id,
content_process_shutdown_chan: self.script_content_process_shutdown_chan,
webvr_thread: self.webvr_thread
}, self.load_data.clone());
LTF::create(self.id,

View file

@ -151,6 +151,7 @@ impl Formattable for ProfilerCategory {
ProfilerCategory::ScriptServiceWorkerEvent => "Script Service Worker Event",
ProfilerCategory::ScriptEnterFullscreen => "Script Enter Fullscreen",
ProfilerCategory::ScriptExitFullscreen => "Script Exit Fullscreen",
ProfilerCategory::ScriptWebVREvent => "Script WebVR Event",
ProfilerCategory::ApplicationHeartbeat => "Application Heartbeat",
};
format!("{}{}", padding, name)

View file

@ -88,6 +88,7 @@ pub enum ProfilerCategory {
ScriptParseXML = 0x76,
ScriptEnterFullscreen = 0x77,
ScriptExitFullscreen = 0x78,
ScriptWebVREvent = 0x79,
ApplicationHeartbeat = 0x90,
}

View file

@ -82,6 +82,8 @@ url = {version = "1.2", features = ["heap_size", "query_encoding"]}
uuid = {version = "0.3.1", features = ["v4"]}
websocket = "0.17"
xml5ever = {version = "0.3.1", features = ["unstable"]}
webvr = {path = "../webvr"}
webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"

View file

@ -51,6 +51,9 @@ use js::jsapi::{JSObject, JSString, JS_GetArrayBufferViewType};
use js::jsapi::{JS_GetLatin1StringCharsAndLength, JS_GetObjectAsArrayBuffer, JS_GetObjectAsArrayBufferView};
use js::jsapi::{JS_GetReservedSlot, JS_GetTwoByteStringCharsAndLength};
use js::jsapi::{JS_IsArrayObject, JS_NewStringCopyN, JS_StringHasLatin1Chars};
use js::jsapi::{JS_NewFloat32Array, JS_NewFloat64Array};
use js::jsapi::{JS_NewInt8Array, JS_NewInt16Array, JS_NewInt32Array};
use js::jsapi::{JS_NewUint8Array, JS_NewUint16Array, JS_NewUint32Array};
use js::jsapi::{MutableHandleValue, Type};
use js::jsval::{ObjectValue, StringValue};
use js::rust::{ToString, get_object_class, is_dom_class, is_dom_object, maybe_wrap_value};
@ -463,6 +466,9 @@ pub unsafe trait ArrayBufferViewContents: Clone {
/// Check if the JS ArrayBufferView type is compatible with the implementor of the
/// trait
fn is_type_compatible(ty: Type) -> bool;
/// Creates a typed array
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject;
}
unsafe impl ArrayBufferViewContents for u8 {
@ -473,47 +479,79 @@ unsafe impl ArrayBufferViewContents for u8 {
_ => false,
}
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint8Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i8 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int8 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt8Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for u16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint16 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint16Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int16 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt16Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for u32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for f32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewFloat32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for f64 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float64 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewFloat64Array(cx, num)
}
}
/// Returns a mutable slice of the Array Buffer View data, viewed as T, without
@ -595,3 +633,23 @@ pub unsafe fn is_array_like(cx: *mut JSContext, value: HandleValue) -> bool {
assert!(JS_IsArrayObject(cx, value, &mut result));
result
}
/// Creates a typed JS array from a Rust slice
pub unsafe fn slice_to_array_buffer_view<T>(cx: *mut JSContext, data: &[T]) -> *mut JSObject
where T: ArrayBufferViewContents
{
let js_object = T::new(cx, data.len() as u32);
assert!(!js_object.is_null());
update_array_buffer_view(js_object, data);
js_object
}
/// Updates a typed JS array from a Rust slice
pub unsafe fn update_array_buffer_view<T>(obj: *mut JSObject, data: &[T])
where T: ArrayBufferViewContents
{
let mut buffer = array_buffer_view_data(obj);
if let Some(ref mut buffer) = buffer {
ptr::copy_nonoverlapping(&data[0], &mut buffer[0], data.len())
}
}

View file

@ -429,6 +429,15 @@ pub mod validation;
pub mod validitystate;
pub mod values;
pub mod virtualmethods;
pub mod vr;
pub mod vrdisplay;
pub mod vrdisplaycapabilities;
pub mod vrdisplayevent;
pub mod vreyeparameters;
pub mod vrfieldofview;
pub mod vrframedata;
pub mod vrpose;
pub mod vrstageparameters;
pub mod webgl_validations;
pub mod webglactiveinfo;
pub mod webglbuffer;

View file

@ -12,7 +12,9 @@ use dom::mimetypearray::MimeTypeArray;
use dom::navigatorinfo;
use dom::pluginarray::PluginArray;
use dom::serviceworkercontainer::ServiceWorkerContainer;
use dom::vr::VR;
use dom::window::Window;
use script_traits::WebVREventMsg;
#[dom_struct]
pub struct Navigator {
@ -21,6 +23,7 @@ pub struct Navigator {
plugins: MutNullableJS<PluginArray>,
mime_types: MutNullableJS<MimeTypeArray>,
service_worker: MutNullableJS<ServiceWorkerContainer>,
vr: MutNullableJS<VR>
}
impl Navigator {
@ -31,6 +34,7 @@ impl Navigator {
plugins: Default::default(),
mime_types: Default::default(),
service_worker: Default::default(),
vr: Default::default(),
}
}
@ -114,4 +118,16 @@ impl NavigatorMethods for Navigator {
true
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#interface-navigator
fn Vr(&self) -> Root<VR> {
self.vr.or_init(|| VR::new(&self.global()))
}
}
impl Navigator {
pub fn handle_webvr_event(&self, event: WebVREventMsg) {
self.vr.get().expect("Shouldn't arrive here with an empty VR instance")
.handle_webvr_event(event);
}
}

components/script/dom/vr.rs (new file, 160 lines)
View file

@ -0,0 +1,160 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRBinding;
use dom::bindings::codegen::Bindings::VRBinding::VRMethods;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use dom::vrdisplay::VRDisplay;
use dom::vrdisplayevent::VRDisplayEvent;
use ipc_channel::ipc;
use ipc_channel::ipc::IpcSender;
use script_traits::WebVREventMsg;
use std::rc::Rc;
use webvr_traits::WebVRMsg;
use webvr_traits::webvr;
#[dom_struct]
pub struct VR {
eventtarget: EventTarget,
displays: DOMRefCell<Vec<JS<VRDisplay>>>
}
impl VR {
fn new_inherited() -> VR {
VR {
eventtarget: EventTarget::new_inherited(),
displays: DOMRefCell::new(Vec::new())
}
}
pub fn new(global: &GlobalScope) -> Root<VR> {
let root = reflect_dom_object(box VR::new_inherited(),
global,
VRBinding::Wrap);
root.register();
root
}
}
impl Drop for VR {
fn drop(&mut self) {
self.unregister();
}
}
impl VRMethods for VR {
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#interface-navigator
fn GetDisplays(&self) -> Rc<Promise> {
let promise = Promise::new(&self.global());
if let Some(webvr_thread) = self.webvr_thread() {
let (sender, receiver) = ipc::channel().unwrap();
webvr_thread.send(WebVRMsg::GetDisplays(sender)).unwrap();
match receiver.recv().unwrap() {
Ok(displays) => {
// Sync displays
for display in displays {
self.sync_display(&display);
}
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
return promise;
}
}
} else {
// WebVR spec: The Promise MUST be rejected if WebVR is not enabled/supported.
promise.reject_error(promise.global().get_cx(), Error::Security);
return promise;
}
// convert from JS to Root
let displays: Vec<Root<VRDisplay>> = self.displays.borrow().iter()
.map(|d| Root::from_ref(&**d))
.collect();
promise.resolve_native(promise.global().get_cx(), &displays);
promise
}
}
impl VR {
fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
self.global().as_window().webvr_thread()
}
fn find_display(&self, display_id: u64) -> Option<Root<VRDisplay>> {
self.displays.borrow()
.iter()
.find(|d| d.get_display_id() == display_id)
.map(|d| Root::from_ref(&**d))
}
fn register(&self) {
if let Some(webvr_thread) = self.webvr_thread() {
let msg = WebVRMsg::RegisterContext(self.global().pipeline_id());
webvr_thread.send(msg).unwrap();
}
}
fn unregister(&self) {
if let Some(webvr_thread) = self.webvr_thread() {
let msg = WebVRMsg::UnregisterContext(self.global().pipeline_id());
webvr_thread.send(msg).unwrap();
}
}
fn sync_display(&self, display: &webvr::VRDisplayData) -> Root<VRDisplay> {
if let Some(existing) = self.find_display(display.display_id) {
existing.update_display(&display);
existing
} else {
let root = VRDisplay::new(&self.global(), display.clone());
self.displays.borrow_mut().push(JS::from_ref(&*root));
root
}
}
pub fn handle_webvr_event(&self, event: WebVREventMsg) {
let WebVREventMsg::DisplayEvent(event) = event;
match &event {
&webvr::VRDisplayEvent::Connect(ref display) => {
let display = self.sync_display(&display);
display.handle_webvr_event(&event);
self.notify_event(&display, &event);
},
&webvr::VRDisplayEvent::Disconnect(id) => {
if let Some(display) = self.find_display(id) {
display.handle_webvr_event(&event);
self.notify_event(&display, &event);
}
},
&webvr::VRDisplayEvent::Activate(ref display, _) |
&webvr::VRDisplayEvent::Deactivate(ref display, _) |
&webvr::VRDisplayEvent::Blur(ref display) |
&webvr::VRDisplayEvent::Focus(ref display) |
&webvr::VRDisplayEvent::PresentChange(ref display, _) |
&webvr::VRDisplayEvent::Change(ref display) => {
let display = self.sync_display(&display);
display.handle_webvr_event(&event);
}
};
}
fn notify_event(&self, display: &VRDisplay, event: &webvr::VRDisplayEvent) {
let event = VRDisplayEvent::new_from_webvr(&self.global(), &display, &event);
event.upcast::<Event>().fire(self.upcast());
}
}

View file

@ -0,0 +1,607 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::CanvasMsg;
use core::ops::Deref;
use dom::bindings::callback::ExceptionHandling;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceBinding::PerformanceMethods;
use dom::bindings::codegen::Bindings::VRDisplayBinding;
use dom::bindings::codegen::Bindings::VRDisplayBinding::VRDisplayMethods;
use dom::bindings::codegen::Bindings::VRDisplayBinding::VREye;
use dom::bindings::codegen::Bindings::VRLayerBinding::VRLayer;
use dom::bindings::codegen::Bindings::WindowBinding::FrameRequestCallback;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutNullableJS, MutJS, Root};
use dom::bindings::num::Finite;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use dom::vrdisplaycapabilities::VRDisplayCapabilities;
use dom::vrdisplayevent::VRDisplayEvent;
use dom::vreyeparameters::VREyeParameters;
use dom::vrframedata::VRFrameData;
use dom::vrpose::VRPose;
use dom::vrstageparameters::VRStageParameters;
use dom::webglrenderingcontext::WebGLRenderingContext;
use ipc_channel::ipc;
use ipc_channel::ipc::{IpcSender, IpcReceiver};
use js::jsapi::JSContext;
use script_runtime::CommonScriptMsg;
use script_runtime::ScriptThreadEventCategory::WebVREvent;
use script_thread::Runnable;
use std::cell::Cell;
use std::mem;
use std::rc::Rc;
use std::sync::mpsc;
use std::thread;
use webrender_traits::VRCompositorCommand;
use webvr_traits::{WebVRDisplayData, WebVRDisplayEvent, WebVRFrameData, WebVRLayer, WebVRMsg};
#[dom_struct]
pub struct VRDisplay {
eventtarget: EventTarget,
#[ignore_heap_size_of = "Defined in rust-webvr"]
display: DOMRefCell<WebVRDisplayData>,
depth_near: Cell<f64>,
depth_far: Cell<f64>,
presenting: Cell<bool>,
left_eye_params: MutJS<VREyeParameters>,
right_eye_params: MutJS<VREyeParameters>,
capabilities: MutJS<VRDisplayCapabilities>,
stage_params: MutNullableJS<VRStageParameters>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
frame_data: DOMRefCell<WebVRFrameData>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
layer: DOMRefCell<WebVRLayer>,
layer_ctx: MutNullableJS<WebGLRenderingContext>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
next_raf_id: Cell<u32>,
/// List of request animation frame callbacks
#[ignore_heap_size_of = "closures are hard"]
raf_callback_list: DOMRefCell<Vec<(u32, Option<Rc<FrameRequestCallback>>)>>,
// Compositor VRFrameData synchronization
frame_data_status: Cell<VRFrameDataStatus>,
#[ignore_heap_size_of = "channels are hard"]
frame_data_receiver: DOMRefCell<Option<IpcReceiver<Result<Vec<u8>, ()>>>>,
}
unsafe_no_jsmanaged_fields!(WebVRDisplayData);
unsafe_no_jsmanaged_fields!(WebVRFrameData);
unsafe_no_jsmanaged_fields!(WebVRLayer);
#[derive(Clone, Copy, PartialEq, Eq, HeapSizeOf)]
enum VRFrameDataStatus {
Waiting,
Synced,
Exit
}
unsafe_no_jsmanaged_fields!(VRFrameDataStatus);
impl VRDisplay {
fn new_inherited(global: &GlobalScope, display: WebVRDisplayData) -> VRDisplay {
let stage = match display.stage_parameters {
Some(ref params) => Some(VRStageParameters::new(params.clone(), &global)),
None => None
};
VRDisplay {
eventtarget: EventTarget::new_inherited(),
display: DOMRefCell::new(display.clone()),
depth_near: Cell::new(0.01),
depth_far: Cell::new(10000.0),
presenting: Cell::new(false),
left_eye_params: MutJS::new(&*VREyeParameters::new(display.left_eye_parameters.clone(), &global)),
right_eye_params: MutJS::new(&*VREyeParameters::new(display.right_eye_parameters.clone(), &global)),
capabilities: MutJS::new(&*VRDisplayCapabilities::new(display.capabilities.clone(), &global)),
stage_params: MutNullableJS::new(stage.as_ref().map(|v| v.deref())),
frame_data: DOMRefCell::new(Default::default()),
layer: DOMRefCell::new(Default::default()),
layer_ctx: MutNullableJS::default(),
next_raf_id: Cell::new(1),
raf_callback_list: DOMRefCell::new(vec![]),
frame_data_status: Cell::new(VRFrameDataStatus::Waiting),
frame_data_receiver: DOMRefCell::new(None),
}
}
pub fn new(global: &GlobalScope, display: WebVRDisplayData) -> Root<VRDisplay> {
reflect_dom_object(box VRDisplay::new_inherited(&global, display),
global,
VRDisplayBinding::Wrap)
}
}
impl Drop for VRDisplay {
fn drop(&mut self) {
if self.presenting.get() {
self.force_stop_present();
}
}
}
impl VRDisplayMethods for VRDisplay {
// https://w3c.github.io/webvr/#dom-vrdisplay-isconnected
fn IsConnected(&self) -> bool {
self.display.borrow().connected
}
// https://w3c.github.io/webvr/#dom-vrdisplay-ispresenting
fn IsPresenting(&self) -> bool {
self.presenting.get()
}
// https://w3c.github.io/webvr/#dom-vrdisplay-capabilities
fn Capabilities(&self) -> Root<VRDisplayCapabilities> {
Root::from_ref(&*self.capabilities.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-stageparameters
fn GetStageParameters(&self) -> Option<Root<VRStageParameters>> {
self.stage_params.get().map(|s| Root::from_ref(&*s))
}
// https://w3c.github.io/webvr/#dom-vrdisplay-geteyeparameters
fn GetEyeParameters(&self, eye: VREye) -> Root<VREyeParameters> {
match eye {
VREye::Left => Root::from_ref(&*self.left_eye_params.get()),
VREye::Right => Root::from_ref(&*self.right_eye_params.get())
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-displayid
fn DisplayId(&self) -> u32 {
self.display.borrow().display_id as u32
}
// https://w3c.github.io/webvr/#dom-vrdisplay-displayname
fn DisplayName(&self) -> DOMString {
DOMString::from(self.display.borrow().display_name.clone())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-getframedata-framedata-framedata
fn GetFrameData(&self, frameData: &VRFrameData) -> bool {
// If presenting, we use data synced with the compositor for the whole frame
if self.presenting.get() {
if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
self.sync_frame_data();
}
frameData.update(& self.frame_data.borrow());
return true;
}
// If not presenting, we fetch immediate VRFrameData
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::GetFrameData(self.global().pipeline_id(),
self.get_display_id(),
self.depth_near.get(),
self.depth_far.get(),
sender)).unwrap();
return match receiver.recv().unwrap() {
Ok(data) => {
frameData.update(&data);
true
},
Err(e) => {
error!("WebVR::GetFrameData: {:?}", e);
false
}
};
}
// https://w3c.github.io/webvr/#dom-vrdisplay-getpose
fn GetPose(&self) -> Root<VRPose> {
VRPose::new(&self.global(), &self.frame_data.borrow().pose)
}
// https://w3c.github.io/webvr/#dom-vrdisplay-resetpose
fn ResetPose(&self) -> () {
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::ResetPose(self.global().pipeline_id(),
self.get_display_id(),
sender)).unwrap();
if let Ok(data) = receiver.recv().unwrap() {
// Some VRDisplay data might change after calling ResetPose()
*self.display.borrow_mut() = data;
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
fn DepthNear(&self) -> Finite<f64> {
Finite::wrap(self.depth_near.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
fn SetDepthNear(&self, value: Finite<f64>) -> () {
self.depth_near.set(*value.deref());
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
fn DepthFar(&self) -> Finite<f64> {
Finite::wrap(self.depth_far.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
fn SetDepthFar(&self, value: Finite<f64>) -> () {
self.depth_far.set(*value.deref());
}
// https://w3c.github.io/webvr/#dom-vrdisplay-requestanimationframe
fn RequestAnimationFrame(&self, callback: Rc<FrameRequestCallback>) -> u32 {
if self.presenting.get() {
let raf_id = self.next_raf_id.get();
self.next_raf_id.set(raf_id + 1);
self.raf_callback_list.borrow_mut().push((raf_id, Some(callback)));
raf_id
} else {
// WebVR spec: When a VRDisplay is not presenting it should
// fallback to window.requestAnimationFrame.
self.global().as_window().RequestAnimationFrame(callback)
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-cancelanimationframe
fn CancelAnimationFrame(&self, handle: u32) -> () {
if self.presenting.get() {
let mut list = self.raf_callback_list.borrow_mut();
if let Some(mut pair) = list.iter_mut().find(|pair| pair.0 == handle) {
pair.1 = None;
}
} else {
// WebVR spec: When a VRDisplay is not presenting it should
// fallback to window.cancelAnimationFrame.
self.global().as_window().CancelAnimationFrame(handle);
}
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#dom-vrdisplay-requestpresent
fn RequestPresent(&self, layers: Vec<VRLayer>) -> Rc<Promise> {
let promise = Promise::new(&self.global());
// TODO: WebVR spec: this method must be called in response to a user gesture
// WebVR spec: If canPresent is false the promise MUST be rejected
if !self.display.borrow().capabilities.can_present {
let msg = "VRDisplay canPresent is false".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// The current WebVR spec only allows 1 VRLayer if the VRDevice can present.
// Future revisions of this spec may allow multiple layers to enable more complex rendering effects
// such as compositing WebGL and DOM elements together.
// That functionality is not allowed by this revision of the spec.
if layers.len() != 1 {
let msg = "The number of layers must be 1".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// Parse and validate received VRLayer
let layer = validate_layer(self.global().get_cx(), &layers[0]);
let layer_bounds;
let layer_ctx;
match layer {
Ok((bounds, ctx)) => {
layer_bounds = bounds;
layer_ctx = ctx;
},
Err(msg) => {
let msg = msg.to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
};
// WebVR spec: Repeat calls while already presenting will update the VRLayers being displayed.
if self.presenting.get() {
*self.layer.borrow_mut() = layer_bounds;
self.layer_ctx.set(Some(&layer_ctx));
promise.resolve_native(promise.global().get_cx(), &());
return promise;
}
// Request Present
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::RequestPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
sender))
.unwrap();
match receiver.recv().unwrap() {
Ok(()) => {
*self.layer.borrow_mut() = layer_bounds;
self.layer_ctx.set(Some(&layer_ctx));
self.init_present();
promise.resolve_native(promise.global().get_cx(), &());
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
}
}
promise
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#dom-vrdisplay-exitpresent
fn ExitPresent(&self) -> Rc<Promise> {
let promise = Promise::new(&self.global());
// WebVR spec: If the VRDisplay is not presenting the promise MUST be rejected.
if !self.presenting.get() {
let msg = "VRDisplay is not presenting".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// Exit present
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
Some(sender)))
.unwrap();
match receiver.recv().unwrap() {
Ok(()) => {
self.stop_present();
promise.resolve_native(promise.global().get_cx(), &());
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
}
}
promise
}
// https://w3c.github.io/webvr/#dom-vrdisplay-submitframe
fn SubmitFrame(&self) -> () {
if !self.presenting.get() {
warn!("VRDisplay not presenting");
return;
}
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let display_id = self.display.borrow().display_id;
let layer = self.layer.borrow();
let msg = VRCompositorCommand::SubmitFrame(display_id, layer.left_bounds, layer.right_bounds);
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
}
}
impl VRDisplay {
fn webvr_thread(&self) -> IpcSender<WebVRMsg> {
self.global().as_window().webvr_thread().expect("Shouldn't arrive here with WebVR disabled")
}
pub fn get_display_id(&self) -> u64 {
self.display.borrow().display_id
}
pub fn update_display(&self, display: &WebVRDisplayData) {
*self.display.borrow_mut() = display.clone();
if let Some(ref stage) = display.stage_parameters {
if self.stage_params.get().is_none() {
let params = Some(VRStageParameters::new(stage.clone(), &self.global()));
self.stage_params.set(params.as_ref().map(|v| v.deref()));
} else {
self.stage_params.get().unwrap().update(&stage);
}
} else {
self.stage_params.set(None);
}
}
pub fn handle_webvr_event(&self, event: &WebVRDisplayEvent) {
match *event {
WebVRDisplayEvent::Connect(ref display) => {
self.update_display(&display);
},
WebVRDisplayEvent::Disconnect(_id) => {
self.display.borrow_mut().connected = false;
},
WebVRDisplayEvent::Activate(ref display, _) |
WebVRDisplayEvent::Deactivate(ref display, _) |
WebVRDisplayEvent::Blur(ref display) |
WebVRDisplayEvent::Focus(ref display) => {
self.update_display(&display);
self.notify_event(&event);
},
WebVRDisplayEvent::PresentChange(ref display, presenting) => {
self.update_display(&display);
self.presenting.set(presenting);
self.notify_event(&event);
},
WebVRDisplayEvent::Change(ref display) => {
// Change event doesn't exist in WebVR spec.
// So we update display data but don't notify JS.
self.update_display(&display);
}
};
}
fn notify_event(&self, event: &WebVRDisplayEvent) {
let root = Root::from_ref(&*self);
let event = VRDisplayEvent::new_from_webvr(&self.global(), &root, &event);
event.upcast::<Event>().fire(self.upcast());
}
fn init_present(&self) {
self.presenting.set(true);
let (sync_sender, sync_receiver) = ipc::channel().unwrap();
*self.frame_data_receiver.borrow_mut() = Some(sync_receiver);
let display_id = self.display.borrow().display_id;
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let js_sender = self.global().script_chan();
let address = Trusted::new(&*self);
let near_init = self.depth_near.get();
let far_init = self.depth_far.get();
// The render loop at native headset frame rate is implemented using a dedicated thread.
// Every loop iteration syncs pose data with the HMD, submits the pixels to the display and waits for Vsync.
// Both the requestAnimationFrame call of a VRDisplay in the JavaScript thread and the VRSyncPoses call
// in the WebRender thread are executed in parallel. This allows some JavaScript code to execute ahead
// while the render thread is syncing the VRFrameData to be used for the current frame.
// This thread runs until the user calls ExitPresent, the tab is closed or some unexpected error happened.
thread::Builder::new().name("WebVR_RAF".into()).spawn(move || {
let (raf_sender, raf_receiver) = mpsc::channel();
let mut near = near_init;
let mut far = far_init;
// Initialize compositor
api_sender.send(CanvasMsg::WebVR(VRCompositorCommand::Create(display_id))).unwrap();
loop {
// Run RAF callbacks on JavaScript thread
let msg = box NotifyDisplayRAF {
address: address.clone(),
sender: raf_sender.clone()
};
js_sender.send(CommonScriptMsg::RunnableMsg(WebVREvent, msg)).unwrap();
// Run Sync Poses in parallel on the Render thread
let msg = VRCompositorCommand::SyncPoses(display_id, near, far, sync_sender.clone());
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
// Wait until both SyncPoses & RAF end
if let Ok(depth) = raf_receiver.recv().unwrap() {
near = depth.0;
far = depth.1;
} else {
// Stop thread
// ExitPresent called or some error happened
return;
}
}
}).expect("Thread spawning failed");
}
fn stop_present(&self) {
self.presenting.set(false);
*self.frame_data_receiver.borrow_mut() = None;
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let display_id = self.display.borrow().display_id;
let msg = VRCompositorCommand::Release(display_id);
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
}
// Only called when the JSContext is destroyed while presenting.
// In this case we don't want to wait for WebVR Thread response.
fn force_stop_present(&self) {
self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
None))
.unwrap();
self.stop_present();
}
fn sync_frame_data(&self) {
let status = if let Some(receiver) = self.frame_data_receiver.borrow().as_ref() {
match receiver.recv().unwrap() {
Ok(bytes) => {
*self.frame_data.borrow_mut() = WebVRFrameData::from_bytes(&bytes[..]);
VRFrameDataStatus::Synced
},
Err(()) => {
VRFrameDataStatus::Exit
}
}
} else {
VRFrameDataStatus::Exit
};
self.frame_data_status.set(status);
}
fn handle_raf(&self, end_sender: &mpsc::Sender<Result<(f64, f64), ()>>) {
self.frame_data_status.set(VRFrameDataStatus::Waiting);
let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]);
let now = self.global().as_window().Performance().Now();
// Call registered VRDisplay.requestAnimationFrame callbacks.
for (_, callback) in callbacks.drain(..) {
if let Some(callback) = callback {
let _ = callback.Call__(Finite::wrap(*now), ExceptionHandling::Report);
}
}
if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
// User didn't call getFrameData while presenting.
// We automatically read the pending VRFrameData to avoid overflowing the IPC-Channel buffers.
// Show a warning as the WebVR Spec recommends.
warn!("WebVR: You should call GetFrameData while presenting");
self.sync_frame_data();
}
match self.frame_data_status.get() {
VRFrameDataStatus::Synced => {
// Sync succeeded. Notify RAF thread.
end_sender.send(Ok((self.depth_near.get(), self.depth_far.get()))).unwrap();
},
VRFrameDataStatus::Exit | VRFrameDataStatus::Waiting => {
// ExitPresent called or some error occurred.
// Notify VRDisplay RAF thread to stop.
end_sender.send(Err(())).unwrap();
}
}
}
}
struct NotifyDisplayRAF {
address: Trusted<VRDisplay>,
sender: mpsc::Sender<Result<(f64, f64), ()>>
}
impl Runnable for NotifyDisplayRAF {
fn name(&self) -> &'static str { "NotifyDisplayRAF" }
fn handler(self: Box<Self>) {
let display = self.address.root();
display.handle_raf(&self.sender);
}
}
// WebVR spec: If the number of values in the leftBounds/rightBounds arrays
// is not 0 or 4 for any of the passed layers the promise is rejected
fn parse_bounds(src: &Option<Vec<Finite<f32>>>, dst: &mut [f32; 4]) -> Result<(), &'static str> {
match *src {
Some(ref values) => {
if values.len() == 0 {
return Ok(())
}
if values.len() != 4 {
return Err("The number of values in the leftBounds/rightBounds arrays must be 0 or 4")
}
for i in 0..4 {
dst[i] = *values[i].deref();
}
Ok(())
},
None => Ok(())
}
}
fn validate_layer(cx: *mut JSContext,
layer: &VRLayer)
-> Result<(WebVRLayer, Root<WebGLRenderingContext>), &'static str> {
let ctx = layer.source.as_ref().map(|ref s| s.get_or_init_webgl_context(cx, None)).unwrap_or(None);
if let Some(ctx) = ctx {
let mut data = WebVRLayer::default();
try!(parse_bounds(&layer.leftBounds, &mut data.left_bounds));
try!(parse_bounds(&layer.rightBounds, &mut data.right_bounds));
Ok((data, ctx))
} else {
Err("VRLayer source must be a WebGL Context")
}
}

View file

@ -0,0 +1,62 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding;
use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding::VRDisplayCapabilitiesMethods;
use dom::bindings::js::Root;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use webvr_traits::WebVRDisplayCapabilities;
#[dom_struct]
pub struct VRDisplayCapabilities {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
capabilities: DOMRefCell<WebVRDisplayCapabilities>
}
unsafe_no_jsmanaged_fields!(WebVRDisplayCapabilities);
impl VRDisplayCapabilities {
fn new_inherited(capabilities: WebVRDisplayCapabilities) -> VRDisplayCapabilities {
VRDisplayCapabilities {
reflector_: Reflector::new(),
capabilities: DOMRefCell::new(capabilities)
}
}
pub fn new(capabilities: WebVRDisplayCapabilities, global: &GlobalScope) -> Root<VRDisplayCapabilities> {
reflect_dom_object(box VRDisplayCapabilities::new_inherited(capabilities),
global,
VRDisplayCapabilitiesBinding::Wrap)
}
}
impl VRDisplayCapabilitiesMethods for VRDisplayCapabilities {
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasposition
fn HasPosition(&self) -> bool {
self.capabilities.borrow().has_position
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasorientation
fn HasOrientation(&self) -> bool {
self.capabilities.borrow().has_orientation
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasexternaldisplay
fn HasExternalDisplay(&self) -> bool {
self.capabilities.borrow().has_external_display
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-canpresent
fn CanPresent(&self) -> bool {
self.capabilities.borrow().can_present
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-maxlayers
fn MaxLayers(&self) -> u32 {
if self.CanPresent() { 1 } else { 0 }
}
}

View file

@ -0,0 +1,116 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventMethods;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventReason;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::globalscope::GlobalScope;
use dom::vrdisplay::VRDisplay;
use dom::window::Window;
use servo_atoms::Atom;
use webvr_traits::{WebVRDisplayEvent, WebVRDisplayEventReason};
#[dom_struct]
pub struct VRDisplayEvent {
event: Event,
display: JS<VRDisplay>,
reason: Option<VRDisplayEventReason>
}
impl VRDisplayEvent {
fn new_inherited(display: &VRDisplay,
reason: Option<VRDisplayEventReason>)
-> VRDisplayEvent {
VRDisplayEvent {
event: Event::new_inherited(),
display: JS::from_ref(display),
reason: reason.clone()
}
}
pub fn new(global: &GlobalScope,
type_: Atom,
bubbles: bool,
cancelable: bool,
display: &VRDisplay,
reason: Option<VRDisplayEventReason>)
-> Root<VRDisplayEvent> {
let ev = reflect_dom_object(box VRDisplayEvent::new_inherited(&display, reason),
global,
VRDisplayEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
pub fn new_from_webvr(global: &GlobalScope,
display: &VRDisplay,
event: &WebVRDisplayEvent)
-> Root<VRDisplayEvent> {
let (name, reason) = match *event {
WebVRDisplayEvent::Connect(_) => ("displayconnect", None),
WebVRDisplayEvent::Disconnect(_) => ("displaydisconnect", None),
WebVRDisplayEvent::Activate(_, reason) => ("activate", Some(reason)),
WebVRDisplayEvent::Deactivate(_, reason) => ("deactivate", Some(reason)),
WebVRDisplayEvent::Blur(_) => ("blur", None),
WebVRDisplayEvent::Focus(_) => ("focus", None),
WebVRDisplayEvent::PresentChange(_, _) => ("presentchange", None),
WebVRDisplayEvent::Change(_) => panic!("VRDisplayEvent:Change event not available in WebVR")
};
// map to JS enum values
let reason = reason.map(|r| {
match r {
WebVRDisplayEventReason::Navigation => VRDisplayEventReason::Navigation,
WebVRDisplayEventReason::Mounted => VRDisplayEventReason::Mounted,
WebVRDisplayEventReason::Unmounted => VRDisplayEventReason::Unmounted,
}
});
VRDisplayEvent::new(&global,
Atom::from(DOMString::from(name)),
false,
false,
&display,
reason)
}
pub fn Constructor(window: &Window,
type_: DOMString,
init: &VRDisplayEventBinding::VRDisplayEventInit)
-> Fallible<Root<VRDisplayEvent>> {
Ok(VRDisplayEvent::new(&window.global(),
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,
&init.display,
init.reason))
}
}
impl VRDisplayEventMethods for VRDisplayEvent {
// https://w3c.github.io/webvr/#dom-vrdisplayevent-display
fn Display(&self) -> Root<VRDisplay> {
Root::from_ref(&*self.display)
}
// https://w3c.github.io/webvr/#enumdef-vrdisplayeventreason
fn GetReason(&self) -> Option<VRDisplayEventReason> {
self.reason
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}

View file

@ -0,0 +1,75 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VREyeParametersBinding;
use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
use dom::bindings::conversions::slice_to_array_buffer_view;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrfieldofview::VRFieldOfView;
use js::jsapi::{Heap, JSContext, JSObject};
use std::default::Default;
use webvr_traits::WebVREyeParameters;
#[dom_struct]
pub struct VREyeParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVREyeParameters>,
offset: Heap<*mut JSObject>,
fov: JS<VRFieldOfView>,
}
unsafe_no_jsmanaged_fields!(WebVREyeParameters);
impl VREyeParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
let mut result = VREyeParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
offset: Heap::default(),
fov: JS::from_ref(&*fov)
};
unsafe {
result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
}
result
}
pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters> {
reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
global,
VREyeParametersBinding::Wrap)
}
}
impl VREyeParametersMethods for VREyeParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.offset.get())
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
fn FieldOfView(&self) -> Root<VRFieldOfView> {
Root::from_ref(&*self.fov)
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
fn RenderWidth(&self) -> u32 {
self.parameters.borrow().render_width
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
fn RenderHeight(&self) -> u32 {
self.parameters.borrow().render_height
}
}

View file

@ -0,0 +1,58 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRFieldOfViewBinding;
use dom::bindings::codegen::Bindings::VRFieldOfViewBinding::VRFieldOfViewMethods;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use webvr_traits::WebVRFieldOfView;
#[dom_struct]
pub struct VRFieldOfView {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
fov: DOMRefCell<WebVRFieldOfView>
}
unsafe_no_jsmanaged_fields!(WebVRFieldOfView);
impl VRFieldOfView {
fn new_inherited(fov: WebVRFieldOfView) -> VRFieldOfView {
VRFieldOfView {
reflector_: Reflector::new(),
fov: DOMRefCell::new(fov)
}
}
pub fn new(global: &GlobalScope, fov: WebVRFieldOfView) -> Root<VRFieldOfView> {
reflect_dom_object(box VRFieldOfView::new_inherited(fov),
global,
VRFieldOfViewBinding::Wrap)
}
}
impl VRFieldOfViewMethods for VRFieldOfView {
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn UpDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().up_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn RightDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().right_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn DownDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().down_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn LeftDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().left_degrees)
}
}

View file

@ -0,0 +1,122 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::codegen::Bindings::VRFrameDataBinding;
use dom::bindings::codegen::Bindings::VRFrameDataBinding::VRFrameDataMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::error::Fallible;
use dom::bindings::js::{JS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrpose::VRPose;
use dom::window::Window;
use js::jsapi::{Heap, JSContext, JSObject};
use std::cell::Cell;
use webvr_traits::WebVRFrameData;
#[dom_struct]
pub struct VRFrameData {
reflector_: Reflector,
left_proj: Heap<*mut JSObject>,
left_view: Heap<*mut JSObject>,
right_proj: Heap<*mut JSObject>,
right_view: Heap<*mut JSObject>,
pose: JS<VRPose>,
timestamp: Cell<f64>,
first_timestamp: Cell<f64>
}
impl VRFrameData {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new(global: &GlobalScope) -> Root<VRFrameData> {
let matrix = [1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0f32];
let pose = VRPose::new(&global, &Default::default());
let mut framedata = VRFrameData {
reflector_: Reflector::new(),
left_proj: Heap::default(),
left_view: Heap::default(),
right_proj: Heap::default(),
right_view: Heap::default(),
pose: JS::from_ref(&*pose),
timestamp: Cell::new(0.0),
first_timestamp: Cell::new(0.0)
};
unsafe {
framedata.left_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.left_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.right_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.right_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
}
reflect_dom_object(box framedata,
global,
VRFrameDataBinding::Wrap)
}
pub fn Constructor(window: &Window) -> Fallible<Root<VRFrameData>> {
Ok(VRFrameData::new(&window.global()))
}
}
impl VRFrameData {
#[allow(unsafe_code)]
pub fn update(&self, data: &WebVRFrameData) {
unsafe {
update_array_buffer_view(self.left_proj.get(), &data.left_projection_matrix);
update_array_buffer_view(self.left_view.get(), &data.left_view_matrix);
update_array_buffer_view(self.right_proj.get(), &data.right_projection_matrix);
update_array_buffer_view(self.right_view.get(), &data.right_view_matrix);
}
self.pose.update(&data.pose);
self.timestamp.set(data.timestamp);
if self.first_timestamp.get() == 0.0 {
self.first_timestamp.set(data.timestamp);
}
}
}
impl VRFrameDataMethods for VRFrameData {
// https://w3c.github.io/webvr/#dom-vrframedata-timestamp
fn Timestamp(&self) -> Finite<f64> {
Finite::wrap(self.timestamp.get() - self.first_timestamp.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-leftprojectionmatrix
unsafe fn LeftProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.left_proj.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-leftviewmatrix
unsafe fn LeftViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.left_view.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-rightprojectionmatrix
unsafe fn RightProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.right_proj.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-rightviewmatrix
unsafe fn RightViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.right_view.get())
}
// https://w3c.github.io/webvr/#dom-vrframedata-pose
fn Pose(&self) -> Root<VRPose> {
Root::from_ref(&*self.pose)
}
}

View file

@ -0,0 +1,133 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRPoseBinding;
use dom::bindings::codegen::Bindings::VRPoseBinding::VRPoseMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::js::Root;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use js::jsapi::{Heap, JSContext, JSObject};
use std::ptr;
use webvr_traits::webvr;
#[dom_struct]
pub struct VRPose {
reflector_: Reflector,
position: DOMRefCell<Heap<*mut JSObject>>,
orientation: DOMRefCell<Heap<*mut JSObject>>,
linear_vel: DOMRefCell<Heap<*mut JSObject>>,
angular_vel: DOMRefCell<Heap<*mut JSObject>>,
linear_acc: DOMRefCell<Heap<*mut JSObject>>,
angular_acc: DOMRefCell<Heap<*mut JSObject>>
}
#[allow(unsafe_code)]
unsafe fn update_or_create_typed_array(cx: *mut JSContext,
src: Option<&[f32]>,
dst: &DOMRefCell<Heap<*mut JSObject>>) {
let mut dst = dst.borrow_mut();
match src {
Some(ref data) => {
if dst.get().is_null() {
dst.set(slice_to_array_buffer_view(cx, &data));
} else {
update_array_buffer_view(dst.get(), &data);
}
},
None => {
if !dst.get().is_null() {
dst.set(ptr::null_mut());
}
}
}
}
#[inline]
#[allow(unsafe_code)]
fn heap_to_option(heap: &DOMRefCell<Heap<*mut JSObject>>) -> Option<NonZero<*mut JSObject>> {
let js_object = heap.borrow_mut().get();
if js_object.is_null() {
None
} else {
unsafe {
Some(NonZero::new(js_object))
}
}
}
impl VRPose {
fn new_inherited() -> VRPose {
VRPose {
reflector_: Reflector::new(),
position: DOMRefCell::new(Heap::default()),
orientation: DOMRefCell::new(Heap::default()),
linear_vel: DOMRefCell::new(Heap::default()),
angular_vel: DOMRefCell::new(Heap::default()),
linear_acc: DOMRefCell::new(Heap::default()),
angular_acc: DOMRefCell::new(Heap::default())
}
}
pub fn new(global: &GlobalScope, pose: &webvr::VRPose) -> Root<VRPose> {
let root = reflect_dom_object(box VRPose::new_inherited(),
global,
VRPoseBinding::Wrap);
root.update(&pose);
root
}
#[allow(unsafe_code)]
pub fn update(&self, pose: &webvr::VRPose) {
let cx = self.global().get_cx();
unsafe {
update_or_create_typed_array(cx, pose.position.as_ref().map(|v| &v[..]), &self.position);
update_or_create_typed_array(cx, pose.orientation.as_ref().map(|v| &v[..]), &self.orientation);
update_or_create_typed_array(cx, pose.linear_velocity.as_ref().map(|v| &v[..]), &self.linear_vel);
update_or_create_typed_array(cx, pose.angular_velocity.as_ref().map(|v| &v[..]), &self.angular_vel);
update_or_create_typed_array(cx, pose.linear_acceleration.as_ref().map(|v| &v[..]), &self.linear_acc);
update_or_create_typed_array(cx, pose.angular_acceleration.as_ref().map(|v| &v[..]), &self.angular_acc);
}
}
}
impl VRPoseMethods for VRPose {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-position
unsafe fn GetPosition(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.position)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearvelocity
unsafe fn GetLinearVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearacceleration
unsafe fn GetLinearAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_acc)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-orientation
unsafe fn GetOrientation(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.orientation)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularvelocity
unsafe fn GetAngularVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularacceleration
unsafe fn GetAngularAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_acc)
}
}

View file

@ -0,0 +1,75 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRStageParametersBinding;
use dom::bindings::codegen::Bindings::VRStageParametersBinding::VRStageParametersMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use js::jsapi::{Heap, JSContext, JSObject};
use webvr_traits::WebVRStageParameters;
#[dom_struct]
pub struct VRStageParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVRStageParameters>,
transform: Heap<*mut JSObject>,
}
unsafe_no_jsmanaged_fields!(WebVRStageParameters);
impl VRStageParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVRStageParameters, global: &GlobalScope) -> VRStageParameters {
let mut stage = VRStageParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
transform: Heap::default()
};
unsafe {
stage.transform.set(slice_to_array_buffer_view(global.get_cx(),
&stage.parameters.borrow().sitting_to_standing_transform));
}
stage
}
pub fn new(parameters: WebVRStageParameters, global: &GlobalScope) -> Root<VRStageParameters> {
reflect_dom_object(box VRStageParameters::new_inherited(parameters, global),
global,
VRStageParametersBinding::Wrap)
}
#[allow(unsafe_code)]
pub fn update(&self, parameters: &WebVRStageParameters) {
unsafe {
update_array_buffer_view(self.transform.get(), &parameters.sitting_to_standing_transform);
}
*self.parameters.borrow_mut() = parameters.clone();
}
}
impl VRStageParametersMethods for VRStageParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrstageparameters-sittingtostandingtransform
unsafe fn SittingToStandingTransform(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.transform.get())
}
// https://w3c.github.io/webvr/#dom-vrstageparameters-sizex
fn SizeX(&self) -> Finite<f32> {
Finite::wrap(self.parameters.borrow().size_x)
}
// https://w3c.github.io/webvr/#dom-vrstageparameters-sizez
fn SizeZ(&self) -> Finite<f32> {
Finite::wrap(self.parameters.borrow().size_z)
}
}

View file

@ -57,3 +57,8 @@ interface NavigatorPlugins {
interface NavigatorCookies {
readonly attribute boolean cookieEnabled;
};
// https://w3c.github.io/webvr/#interface-navigator
partial interface Navigator {
[SameObject, Pref="dom.webvr.enabled"] readonly attribute VR vr;
};

View file

@ -0,0 +1,10 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-navigator
[Pref="dom.webvr.enabled"]
interface VR: EventTarget {
Promise<sequence<VRDisplay>> getDisplays();
//readonly attribute FrozenArray<VRDisplay> activeVRDisplays;
};

View file

@ -0,0 +1,131 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
enum VREye {
"left",
"right"
};
// https://w3c.github.io/webvr/#interface-vrdisplay
[Pref="dom.webvr.enabled"]
interface VRDisplay : EventTarget {
readonly attribute boolean isConnected;
readonly attribute boolean isPresenting;
/**
* Dictionary of capabilities describing the VRDisplay.
*/
[SameObject] readonly attribute VRDisplayCapabilities capabilities;
/**
* If this VRDisplay supports room-scale experiences, the optional
* stageParameters attribute contains details on the room-scale parameters.
* The stageParameters attribute cannot change between null
* and non-null once the VRDisplay is enumerated; however,
* the values within VRStageParameters may change after
* any call to VRDisplay.submitFrame as the user may re-configure
* their environment at any time.
*/
readonly attribute VRStageParameters? stageParameters;
/**
* Return the current VREyeParameters for the given eye.
*/
VREyeParameters getEyeParameters(VREye whichEye);
/**
* An identifier for this distinct VRDisplay. Used as an
* association point in the Gamepad API.
*/
readonly attribute unsigned long displayId;
/**
* A display name, a user-readable name identifying it.
*/
readonly attribute DOMString displayName;
/**
* Populates the passed VRFrameData with the information required to render
* the current frame.
*/
boolean getFrameData(VRFrameData frameData);
/**
* Return a VRPose containing the future predicted pose of the VRDisplay
* when the current frame will be presented. The value returned will not
* change until JavaScript has returned control to the browser.
*
* The VRPose will contain the position, orientation, velocity,
* and acceleration of each of these properties.
*/
[NewObject] VRPose getPose();
/**
* Reset the pose for this display, treating its current position and
* orientation as the "origin/zero" values. VRPose.position,
* VRPose.orientation, and VRStageParameters.sittingToStandingTransform may be
* updated when calling resetPose(). This should only be called in
* sitting-space experiences.
*/
void resetPose();
/**
* z-depth defining the near plane of the eye view frustum
* enables mapping of values in the render target depth
* attachment to scene coordinates. Initially set to 0.01.
*/
attribute double depthNear;
/**
* z-depth defining the far plane of the eye view frustum
* enables mapping of values in the render target depth
* attachment to scene coordinates. Initially set to 10000.0.
*/
attribute double depthFar;
/**
* The callback passed to `requestAnimationFrame` will be called
* any time a new frame should be rendered. When the VRDisplay is
* presenting, the callback will be called at the native refresh
* rate of the HMD. When not presenting, this function acts
* identically to window.requestAnimationFrame. Content should
* make no assumptions of frame rate or vsync behavior as the HMD runs
* asynchronously from other displays and at differing refresh rates.
*/
unsigned long requestAnimationFrame(FrameRequestCallback callback);
/**
* Passing the value returned by `requestAnimationFrame` to
* `cancelAnimationFrame` will unregister the callback.
*/
void cancelAnimationFrame(unsigned long handle);
/**
* Begin presenting to the VRDisplay. Must be called in response to a user gesture.
* Repeat calls while already presenting will update the VRLayers being displayed.
* If the number of values in the leftBounds/rightBounds arrays is not 0 or 4 for
* any of the passed layers, the promise is rejected.
* If the source of any of the layers is not present (null), the promise is rejected.
*/
Promise<void> requestPresent(sequence<VRLayer> layers);
/**
* Stops presenting to the VRDisplay.
*/
Promise<void> exitPresent();
/**
* Get the layers currently being presented.
*/
//sequence<VRLayer> getLayers();
/**
* The VRLayer provided to the VRDisplay will be captured and presented
* in the HMD. Calling this function has the same effect on the source
* canvas as any other operation that uses its source image, and canvases
* created without preserveDrawingBuffer set to true will be cleared.
*/
void submitFrame();
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrdisplaycapabilities
[Pref="dom.webvr.enabled"]
interface VRDisplayCapabilities {
readonly attribute boolean hasPosition;
readonly attribute boolean hasOrientation;
readonly attribute boolean hasExternalDisplay;
readonly attribute boolean canPresent;
readonly attribute unsigned long maxLayers;
};

View file

@ -0,0 +1,23 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrdisplayevent
enum VRDisplayEventReason {
"navigation",
"mounted",
"unmounted",
"requested"
};
[Pref="dom.webvr.enabled", Constructor(DOMString type, VRDisplayEventInit eventInitDict)]
interface VRDisplayEvent : Event {
readonly attribute VRDisplay display;
readonly attribute VRDisplayEventReason? reason;
};
dictionary VRDisplayEventInit : EventInit {
required VRDisplay display;
VRDisplayEventReason reason;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vreyeparameters
[Pref="dom.webvr.enabled"]
interface VREyeParameters {
readonly attribute Float32Array offset;
[SameObject] readonly attribute VRFieldOfView fieldOfView;
readonly attribute unsigned long renderWidth;
readonly attribute unsigned long renderHeight;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrfieldofview
[Pref="dom.webvr.enabled"]
interface VRFieldOfView {
readonly attribute double upDegrees;
readonly attribute double rightDegrees;
readonly attribute double downDegrees;
readonly attribute double leftDegrees;
};

View file

@ -0,0 +1,15 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrframedata
[Pref="dom.webvr.enabled", Constructor]
interface VRFrameData {
readonly attribute DOMHighResTimeStamp timestamp;
readonly attribute Float32Array leftProjectionMatrix;
readonly attribute Float32Array leftViewMatrix;
readonly attribute Float32Array rightProjectionMatrix;
readonly attribute Float32Array rightViewMatrix;
readonly attribute VRPose pose;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrlayer
//typedef (HTMLCanvasElement or OffscreenCanvas) VRSource;
dictionary VRLayer {
HTMLCanvasElement source;
sequence<float> leftBounds;
sequence<float> rightBounds;
};

View file

@ -0,0 +1,14 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrpose
[Pref="dom.webvr.enabled"]
interface VRPose {
readonly attribute Float32Array? position;
readonly attribute Float32Array? linearVelocity;
readonly attribute Float32Array? linearAcceleration;
readonly attribute Float32Array? orientation;
readonly attribute Float32Array? angularVelocity;
readonly attribute Float32Array? angularAcceleration;
};

View file

@ -0,0 +1,11 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrstageparameters
[Pref="dom.webvr.enabled"]
interface VRStageParameters {
readonly attribute Float32Array sittingToStandingTransform;
readonly attribute float sizeX;
readonly attribute float sizeZ;
};

View file

@ -110,6 +110,7 @@ use timers::{IsInterval, TimerCallback};
use tinyfiledialogs::{self, MessageBoxIcon};
use url::Position;
use webdriver_handlers::jsval_to_webdriver;
use webvr_traits::WebVRMsg;
/// Current state of the window object
#[derive(JSTraceable, Copy, Clone, Debug, PartialEq, HeapSizeOf)]
@ -241,6 +242,10 @@ pub struct Window {
media_query_lists: WeakMediaQueryListVec,
test_runner: MutNullableJS<TestRunner>,
/// A handle for communicating messages to the webvr thread, if available.
#[ignore_heap_size_of = "channels are hard"]
webvr_thread: Option<IpcSender<WebVRMsg>>
}
impl Window {
@ -321,6 +326,10 @@ impl Window {
pub fn current_viewport(&self) -> Rect<Au> {
self.current_viewport.clone().get()
}
pub fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
self.webvr_thread.clone()
}
}
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
@ -1590,7 +1599,8 @@ impl Window {
layout_chan: Sender<Msg>,
id: PipelineId,
parent_info: Option<(PipelineId, FrameType)>,
window_size: Option<WindowSizeData>)
window_size: Option<WindowSizeData>,
webvr_thread: Option<IpcSender<WebVRMsg>>)
-> Root<Window> {
let layout_rpc: Box<LayoutRPC + Send> = {
let (rpc_send, rpc_recv) = channel();
@ -1654,6 +1664,7 @@ impl Window {
scroll_offsets: DOMRefCell::new(HashMap::new()),
media_query_lists: WeakMediaQueryListVec::new(),
test_runner: Default::default(),
webvr_thread: webvr_thread
};
unsafe {

View file

@ -98,6 +98,7 @@ extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
extern crate webvr_traits;
extern crate xml5ever;
mod body;

View file

@ -80,6 +80,7 @@ pub enum ScriptThreadEventCategory {
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM

View file

@ -90,6 +90,7 @@ use script_traits::{ScriptThreadFactory, TimerEvent, TimerEventRequest, TimerSou
use script_traits::{TouchEventType, TouchId, UntrustedNodeAddress, WindowSizeData, WindowSizeType};
use script_traits::CompositorEvent::{KeyEvent, MouseButtonEvent, MouseMoveEvent, ResizeEvent};
use script_traits::CompositorEvent::{TouchEvent, TouchpadPressureEvent};
use script_traits::WebVREventMsg;
use script_traits::webdriver_msg::WebDriverScriptCommand;
use serviceworkerjob::{Job, JobQueue, AsyncJobHandler, FinishJobHandler, InvokeType, SettleType};
use servo_config::opts;
@ -116,6 +117,7 @@ use task_source::user_interaction::{UserInteractionTask, UserInteractionTaskSour
use time::Tm;
use url::Position;
use webdriver_handlers;
use webvr_traits::WebVRMsg;
thread_local!(pub static STACK_ROOTS: Cell<Option<RootCollectionPtr>> = Cell::new(None));
thread_local!(static SCRIPT_THREAD_ROOT: Cell<Option<*const ScriptThread>> = Cell::new(None));
@ -477,6 +479,9 @@ pub struct ScriptThread {
content_process_shutdown_chan: IpcSender<()>,
promise_job_queue: PromiseJobQueue,
/// A handle to the webvr thread, if available
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// In the event of thread panic, all data on the stack runs its destructor. However, there
@ -699,6 +704,8 @@ impl ScriptThread {
promise_job_queue: PromiseJobQueue::new(),
layout_to_constellation_chan: state.layout_to_constellation_chan,
webvr_thread: state.webvr_thread
}
}
@ -945,6 +952,7 @@ impl ScriptThread {
ScriptThreadEventCategory::SetViewport => ProfilerCategory::ScriptSetViewport,
ScriptThreadEventCategory::TimerEvent => ProfilerCategory::ScriptTimerEvent,
ScriptThreadEventCategory::WebSocketEvent => ProfilerCategory::ScriptWebSocketEvent,
ScriptThreadEventCategory::WebVREvent => ProfilerCategory::ScriptWebVREvent,
ScriptThreadEventCategory::WorkerEvent => ProfilerCategory::ScriptWorkerEvent,
ScriptThreadEventCategory::ServiceWorkerEvent => ProfilerCategory::ScriptServiceWorkerEvent,
ScriptThreadEventCategory::EnterFullscreen => ProfilerCategory::ScriptEnterFullscreen,
@ -1009,6 +1017,8 @@ impl ScriptThread {
self.handle_reload(pipeline_id),
ConstellationControlMsg::ExitPipeline(pipeline_id, discard_browsing_context) =>
self.handle_exit_pipeline_msg(pipeline_id, discard_browsing_context),
ConstellationControlMsg::WebVREvent(pipeline_id, event) =>
self.handle_webvr_event(pipeline_id, event),
msg @ ConstellationControlMsg::AttachLayout(..) |
msg @ ConstellationControlMsg::Viewport(..) |
msg @ ConstellationControlMsg::SetScrollState(..) |
@ -1751,7 +1761,8 @@ impl ScriptThread {
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,
incomplete.window_size);
incomplete.window_size,
self.webvr_thread.clone());
let frame_element = frame_element.r().map(Castable::upcast);
let browsing_context = BrowsingContext::new(&window, frame_element);
@ -2212,6 +2223,14 @@ impl ScriptThread {
}
}
fn handle_webvr_event(&self, pipeline_id: PipelineId, event: WebVREventMsg) {
let window = self.documents.borrow().find_window(pipeline_id);
if let Some(window) = window {
let navigator = window.Navigator();
navigator.handle_webvr_event(event);
}
}
pub fn enqueue_promise_job(job: EnqueuedPromiseCallback, global: &GlobalScope) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };

View file

@ -35,3 +35,4 @@ servo_url = {path = "../url", features = ["servo"]}
style_traits = {path = "../style_traits", features = ["servo"]}
time = "0.1.12"
url = {version = "1.2", features = ["heap_size"]}
webvr_traits = {path = "../webvr_traits"}

View file

@ -35,6 +35,7 @@ extern crate serde_derive;
extern crate servo_url;
extern crate style_traits;
extern crate time;
extern crate webvr_traits;
mod script_msg;
pub mod webdriver_msg;
@ -71,6 +72,7 @@ use std::fmt;
use std::sync::mpsc::{Receiver, Sender};
use style_traits::{PagePx, UnsafeNode, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webvr_traits::{WebVRDisplayEvent, WebVRMsg};
pub use script_msg::{LayoutMsg, ScriptMsg, EventResult, LogEntry};
pub use script_msg::{ServiceWorkerMsg, ScopeThings, SWManagerMsg, SWManagerSenders, DOMMessage};
@ -263,6 +265,8 @@ pub enum ConstellationControlMsg {
ReportCSSError(PipelineId, String, usize, usize, String),
/// Reload the given page.
Reload(PipelineId),
/// Notifies the script thread of a WebVR device event
WebVREvent(PipelineId, WebVREventMsg)
}
impl fmt::Debug for ConstellationControlMsg {
@ -295,6 +299,7 @@ impl fmt::Debug for ConstellationControlMsg {
FramedContentChanged(..) => "FramedContentChanged",
ReportCSSError(..) => "ReportCSSError",
Reload(..) => "Reload",
WebVREvent(..) => "WebVREvent",
};
write!(formatter, "ConstellationMsg::{}", variant)
}
@ -478,6 +483,8 @@ pub struct InitialScriptState {
pub pipeline_namespace_id: PipelineNamespaceId,
/// A ping will be sent on this channel once the script thread shuts down.
pub content_process_shutdown_chan: IpcSender<()>,
/// A channel to the webvr thread, if available.
pub webvr_thread: Option<IpcSender<WebVRMsg>>
}
/// This trait allows creating a `ScriptThread` without depending on the `script`
@ -716,6 +723,18 @@ pub enum ConstellationMsg {
Reload,
/// A log entry, with the top-level frame id and thread name
LogEntry(Option<FrameId>, Option<String>, LogEntry),
/// Set the WebVR thread channel.
SetWebVRThread(IpcSender<WebVRMsg>),
/// Dispatch a WebVR event to the subscribed script threads.
WebVREvent(Vec<PipelineId>, WebVREventMsg),
}
/// Messages to the constellation originating from the WebVR thread.
/// Used to dispatch VR headset state events: connected, disconnected, and so on.
#[derive(Deserialize, Serialize, Clone)]
pub enum WebVREventMsg {
/// Inform the constellation of a VR display event.
DisplayEvent(WebVRDisplayEvent)
}
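The constellation-side handling of ConstellationMsg::WebVREvent is not shown in this hunk. As a rough illustration only (not part of the patch), a constellation-like owner could fan such an event out to every subscribed pipeline along these lines; the script_channels map is a stand-in for the constellation's real pipeline bookkeeping, and the channel type IpcSender<ConstellationControlMsg> is an assumption.
use std::collections::HashMap;
use ipc_channel::ipc::IpcSender;

fn broadcast_webvr_event(script_channels: &HashMap<PipelineId, IpcSender<ConstellationControlMsg>>,
                         pipeline_ids: &[PipelineId],
                         event: WebVREventMsg) {
    for id in pipeline_ids {
        if let Some(chan) = script_channels.get(id) {
            // WebVREventMsg derives Clone, so the same event can be delivered to each pipeline;
            // the receiving script thread routes it to the Navigator object of the matching window.
            let _ = chan.send(ConstellationControlMsg::WebVREvent(*id, event.clone()));
        }
    }
}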
/// Resources required by workerglobalscopes

View file

@ -54,6 +54,8 @@ servo_config = {path = "../config"}
servo_url = {path = "../url"}
style = {path = "../style", features = ["servo"]}
url = "1.2"
webvr = {path = "../webvr"}
webvr_traits = {path = "../webvr_traits"}
webdriver_server = {path = "../webdriver_server", optional = true}
[dependencies.webrender]

View file

@ -48,6 +48,8 @@ pub extern crate script_layout_interface;
pub extern crate servo_config;
pub extern crate servo_url;
pub extern crate style;
pub extern crate webvr;
pub extern crate webvr_traits;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
@ -96,6 +98,7 @@ use std::cmp::max;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use webvr::{WebVRThread, WebVRCompositorHandler};
pub use gleam::gl;
pub use servo_config as config;
@ -193,6 +196,7 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
debugger_chan,
devtools_chan,
supports_clipboard,
&webrender,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
@ -260,6 +264,7 @@ fn create_constellation(user_agent: Cow<'static, str>,
debugger_chan: Option<debugger::Sender>,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender: &webrender::Renderer,
webrender_api_sender: webrender_traits::RenderApiSender)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothRequest> = BluetoothThreadFactory::new();
@ -295,6 +300,16 @@ fn create_constellation(user_agent: Cow<'static, str>,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
if PREFS.is_webvr_enabled() {
// WebVR initialization
let (mut handler, sender) = WebVRCompositorHandler::new();
let webvr_thread = WebVRThread::spawn(constellation_chan.clone(), sender);
handler.set_webvr_thread_sender(webvr_thread.clone());
webrender.set_vr_compositor_handler(handler);
constellation_chan.send(ConstellationMsg::SetWebVRThread(webvr_thread)).unwrap();
}
if let Some(url) = url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};

View file

@ -0,0 +1,23 @@
[package]
name = "webvr"
version = "0.0.1"
authors = ["The Servo Project Developers"]
license = "MPL-2.0"
publish = false
[lib]
name = "webvr"
path = "lib.rs"
[dependencies]
ipc-channel = "0.5"
log = "0.3"
msg = {path = "../msg"}
script_traits = {path = "../script_traits"}
servo_config = {path = "../config"}
webvr_traits = {path = "../webvr_traits" }
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"
default_features = false
features = ["serde_derive"]

components/webvr/lib.rs
View file

@ -0,0 +1,19 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(custom_derive)]
#![feature(plugin)]
#![deny(unsafe_code)]
extern crate ipc_channel;
#[macro_use]
extern crate log;
extern crate msg;
extern crate script_traits;
extern crate servo_config;
extern crate webrender_traits;
extern crate webvr_traits;
mod webvr_thread;
pub use webvr_thread::{WebVRThread, WebVRCompositorHandler};

View file

@ -0,0 +1,377 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc;
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use msg::constellation_msg::PipelineId;
use script_traits::{ConstellationMsg, WebVREventMsg};
use servo_config::prefs::PREFS;
use std::{thread, time};
use std::collections::{HashMap, HashSet};
use std::sync::mpsc;
use std::sync::mpsc::{Receiver, Sender};
use webrender_traits;
use webvr_traits::{WebVRMsg, WebVRResult};
use webvr_traits::webvr::*;
/// The WebVRThread owns the native VRDisplays, handles their life cycle inside Servo, and
/// acts as a doorman for untrusted VR requests from DOM objects. These are the key components:
/// * WebVRThread::spawn() creates a long-lived thread that waits for VR commands from DOM objects
/// and handles them in its trusted thread. The back-and-forth communication with the DOM is implemented
/// using IPC channels. This thread creates the VRServiceManager instance, which handles the life cycle
/// of all VR vendor SDKs and owns all the native VRDisplays. These displays are guaranteed to live while
/// the spawned thread is alive. The WebVRThread is unique and is closed using the Exit message when the
/// whole browser is about to be closed.
/// * An event-polling thread is created in order to implement WebVR events (connected, disconnected, ...).
/// This thread wakes up the WebVRThread from time to time by sending a PollEvents message. It is only
/// created when there is at least one live JavaScript context using the WebVR APIs, and it shuts down when
/// the tab is closed. A single instance of the thread is used to handle multiple JavaScript contexts.
/// The constellation channel is used to notify events to the script thread.
/// * When the WebVR APIs are used in a tab, its pipeline_id is registered using the RegisterContext message.
/// When the tab is closed, an UnregisterContext message is sent. This way the WebVR thread has a list of the
/// pipeline ids using the WebVR APIs. These ids are used to implement the privacy guidelines defined in the
/// WebVR spec.
/// * When a JavaScript thread gains access to present to a headset, the WebVRThread is not used as an
/// intermediary in the VRDisplay.requestAnimationFrame loop, in order to minimize latency. Direct
/// communication with WebRender is used instead. See WebVRCompositorHandler and VRCompositorCommand for
/// more details.
pub struct WebVRThread {
receiver: IpcReceiver<WebVRMsg>,
sender: IpcSender<WebVRMsg>,
service: VRServiceManager,
contexts: HashSet<PipelineId>,
constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender,
polling_events: bool,
presenting: HashMap<u64, PipelineId>
}
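For illustration only (not part of the patch): the DOM-side round trip described in the doc comment above boils down to registering the pipeline and then exchanging a one-shot IPC channel per request. The helper below is a sketch; in the real code the sender lives in the VR DOM object and is obtained through window.webvr_thread().
fn query_displays(webvr_chan: &IpcSender<WebVRMsg>,
                  pipeline: PipelineId)
                  -> WebVRResult<Vec<VRDisplayData>> {
    // Register the JavaScript context so the thread can enforce the spec's privacy rules
    // and schedule the event-polling thread.
    webvr_chan.send(WebVRMsg::RegisterContext(pipeline)).unwrap();

    // Each request carries its own reply channel; the WebVRThread answers on it.
    let (sender, receiver) = ipc::channel().unwrap();
    webvr_chan.send(WebVRMsg::GetDisplays(sender)).unwrap();
    receiver.recv().unwrap()
}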
impl WebVRThread {
fn new(receiver: IpcReceiver<WebVRMsg>,
sender: IpcSender<WebVRMsg>,
constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender)
-> WebVRThread {
let mut service = VRServiceManager::new();
service.register_defaults();
WebVRThread {
receiver: receiver,
sender: sender,
service: service,
contexts: HashSet::new(),
constellation_chan: constellation_chan,
vr_compositor_chan: vr_compositor_chan,
polling_events: false,
presenting: HashMap::new()
}
}
pub fn spawn(constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender)
-> IpcSender<WebVRMsg> {
let (sender, receiver) = ipc::channel().unwrap();
let sender_clone = sender.clone();
thread::Builder::new().name("WebVRThread".into()).spawn(move || {
WebVRThread::new(receiver, sender_clone, constellation_chan, vr_compositor_chan).start();
}).expect("Thread spawning failed");
sender
}
fn start(&mut self) {
while let Ok(msg) = self.receiver.recv() {
match msg {
WebVRMsg::RegisterContext(context) => {
self.handle_register_context(context);
self.schedule_poll_events();
},
WebVRMsg::UnregisterContext(context) => {
self.handle_unregister_context(context);
},
WebVRMsg::PollEvents(sender) => {
self.poll_events(sender);
},
WebVRMsg::GetDisplays(sender) => {
self.handle_get_displays(sender);
self.schedule_poll_events();
},
WebVRMsg::GetFrameData(pipeline_id, display_id, near, far, sender) => {
self.handle_framedata(pipeline_id, display_id, near, far, sender);
},
WebVRMsg::ResetPose(pipeline_id, display_id, sender) => {
self.handle_reset_pose(pipeline_id, display_id, sender);
},
WebVRMsg::RequestPresent(pipeline_id, display_id, sender) => {
self.handle_request_present(pipeline_id, display_id, sender);
},
WebVRMsg::ExitPresent(pipeline_id, display_id, sender) => {
self.handle_exit_present(pipeline_id, display_id, sender);
},
WebVRMsg::CreateCompositor(display_id) => {
self.handle_create_compositor(display_id);
},
WebVRMsg::Exit => {
break
},
}
}
}
fn handle_register_context(&mut self, ctx: PipelineId) {
self.contexts.insert(ctx);
}
fn handle_unregister_context(&mut self, ctx: PipelineId) {
self.contexts.remove(&ctx);
}
fn handle_get_displays(&mut self, sender: IpcSender<WebVRResult<Vec<VRDisplayData>>>) {
let displays = self.service.get_displays();
let mut result = Vec::new();
for display in displays {
result.push(display.borrow().data());
}
sender.send(Ok(result)).unwrap();
}
fn handle_framedata(&mut self,
pipeline: PipelineId,
display_id: u64,
near: f64,
far: f64,
sender: IpcSender<WebVRResult<VRFrameData>>) {
match self.access_check(pipeline, display_id) {
Ok(display) => {
sender.send(Ok(display.borrow().inmediate_frame_data(near, far))).unwrap()
},
Err(msg) => sender.send(Err(msg.into())).unwrap()
}
}
fn handle_reset_pose(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: IpcSender<WebVRResult<VRDisplayData>>) {
match self.access_check(pipeline, display_id) {
Ok(display) => {
display.borrow_mut().reset_pose();
sender.send(Ok(display.borrow().data())).unwrap();
},
Err(msg) => {
sender.send(Err(msg.into())).unwrap()
}
}
}
// This method implements the privacy and security guidelines defined in the WebVR spec.
// For example, a secondary tab is not allowed to read VRDisplay data or stop a VR presentation
// while the user is having a VR experience in the current tab.
// These security rules also avoid multithreading race conditions between the WebVRThread and
// the WebRender thread. See the WebVRCompositorHandler implementation notes for more details.
fn access_check(&self, pipeline: PipelineId, display_id: u64) -> Result<&VRDisplayPtr, &'static str> {
if *self.presenting.get(&display_id).unwrap_or(&pipeline) != pipeline {
return Err("No access granted to this display because it's presenting in another JavaScript tab");
}
self.service.get_display(display_id).ok_or("Device not found")
}
fn handle_request_present(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: IpcSender<WebVRResult<()>>) {
match self.access_check(pipeline, display_id).map(|d| d.clone()) {
Ok(display) => {
self.presenting.insert(display_id, pipeline);
let data = display.borrow().data();
sender.send(Ok(())).unwrap();
self.notify_event(VRDisplayEvent::PresentChange(data, true));
},
Err(msg) => {
sender.send(Err(msg.into())).unwrap();
}
}
}
fn handle_exit_present(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: Option<IpcSender<WebVRResult<()>>>) {
match self.access_check(pipeline, display_id).map(|d| d.clone()) {
Ok(display) => {
self.presenting.remove(&display_id);
if let Some(sender) = sender {
sender.send(Ok(())).unwrap();
}
let data = display.borrow().data();
self.notify_event(VRDisplayEvent::PresentChange(data, false));
},
Err(msg) => {
if let Some(sender) = sender {
sender.send(Err(msg.into())).unwrap();
}
}
}
}
fn handle_create_compositor(&mut self, display_id: u64) {
let compositor = self.service.get_display(display_id).map(|d| WebVRCompositor(d.as_ptr()));
self.vr_compositor_chan.send(compositor).unwrap();
}
fn poll_events(&mut self, sender: IpcSender<bool>) {
loop {
let events = self.service.poll_events();
if events.is_empty() {
break;
}
self.notify_events(events)
}
// Stop polling for events once no JavaScript contexts are using the WebVR APIs
self.polling_events = self.contexts.len() > 0;
sender.send(self.polling_events).unwrap();
}
fn notify_events(&self, events: Vec<VRDisplayEvent>) {
let pipeline_ids: Vec<PipelineId> = self.contexts.iter().map(|c| *c).collect();
for event in events {
let event = WebVREventMsg::DisplayEvent(event);
self.constellation_chan.send(ConstellationMsg::WebVREvent(pipeline_ids.clone(), event)).unwrap();
}
}
#[inline]
fn notify_event(&self, event: VRDisplayEvent) {
self.notify_events(vec![event]);
}
fn schedule_poll_events(&mut self) {
if !self.service.is_initialized() || self.polling_events {
return;
}
self.polling_events = true;
let webvr_thread = self.sender.clone();
let (sender, receiver) = ipc::channel().unwrap();
// Defines the polling interval time in ms for VR Events such as VRDisplay connected, disconnected, etc.
let polling_interval: u64 = PREFS.get("dom.webvr.event_polling_interval").as_u64().unwrap_or(500);
thread::Builder::new().name("WebVRPollEvents".into()).spawn(move || {
loop {
if webvr_thread.send(WebVRMsg::PollEvents(sender.clone())).is_err() {
// WebVR Thread closed
break;
}
if !receiver.recv().unwrap_or(false) {
// WebVR Thread asked to unschedule this thread
break;
}
thread::sleep(time::Duration::from_millis(polling_interval));
}
}).expect("Thread spawning failed");
}
}
/// Notes about WebVRCompositorHandler implementation:
/// Raw pointers are used instead of Arc<Mutex> as a heavy optimization for latency reasons.
/// This also avoids "JS DDoS" attacks, such as a secondary JavaScript tab degrading performance
/// by flooding the WebVRThread with messages while the main JavaScript tab is presenting to the headset.
/// Multithreading won't be a problem because:
/// * Thanks to the security rules implemented in the WebVRThread, when a VRDisplay is in a presenting loop
/// no other JSContext is granted access to the VRDisplay, so there aren't really any multithreading race conditions.
/// * VRDisplay implementations are designed to allow calling compositor functions
/// in another thread by using the Send + Sync traits.
/// VRDisplay pointers are guaranteed to point to valid memory:
/// * VRDisplays are owned by the VRServiceManager which lives in the WebVRThread.
/// * WebVRCompositorHandler is stopped automatically when a JS tab is closed or the whole browser is closed.
/// * WebVRThread and its VRDisplays are destroyed after all tabs are dropped and the browser is about to exit.
/// WebVRThread is closed using the Exit message.
pub struct WebVRCompositor(*mut VRDisplay);
pub struct WebVRCompositorHandler {
compositors: HashMap<webrender_traits::VRCompositorId, WebVRCompositor>,
webvr_thread_receiver: Receiver<Option<WebVRCompositor>>,
webvr_thread_sender: Option<IpcSender<WebVRMsg>>
}
#[allow(unsafe_code)]
unsafe impl Send for WebVRCompositor {}
pub type WebVRCompositorSender = Sender<Option<WebVRCompositor>>;
impl WebVRCompositorHandler {
pub fn new() -> (Box<WebVRCompositorHandler>, WebVRCompositorSender) {
let (sender, receiver) = mpsc::channel();
let instance = Box::new(WebVRCompositorHandler {
compositors: HashMap::new(),
webvr_thread_receiver: receiver,
webvr_thread_sender: None
});
(instance, sender)
}
}
impl webrender_traits::VRCompositorHandler for WebVRCompositorHandler {
#[allow(unsafe_code)]
fn handle(&mut self, cmd: webrender_traits::VRCompositorCommand, texture_id: Option<u32>) {
match cmd {
webrender_traits::VRCompositorCommand::Create(compositor_id) => {
self.create_compositor(compositor_id);
}
webrender_traits::VRCompositorCommand::SyncPoses(compositor_id, near, far, sender) => {
if let Some(compositor) = self.compositors.get(&compositor_id) {
let pose = unsafe {
(*compositor.0).sync_poses();
(*compositor.0).synced_frame_data(near, far).to_bytes()
};
let _ = sender.send(Ok(pose));
} else {
let _ = sender.send(Err(()));
}
}
webrender_traits::VRCompositorCommand::SubmitFrame(compositor_id, left_bounds, right_bounds) => {
if let Some(compositor) = self.compositors.get(&compositor_id) {
if let Some(texture_id) = texture_id {
let layer = VRLayer {
texture_id: texture_id,
left_bounds: left_bounds,
right_bounds: right_bounds
};
unsafe {
(*compositor.0).submit_frame(&layer);
}
}
}
}
webrender_traits::VRCompositorCommand::Release(compositor_id) => {
self.compositors.remove(&compositor_id);
}
}
}
}
impl WebVRCompositorHandler {
#[allow(unsafe_code)]
fn create_compositor(&mut self, display_id: webrender_traits::VRCompositorId) {
let sender = match self.webvr_thread_sender {
Some(ref s) => s,
None => return,
};
sender.send(WebVRMsg::CreateCompositor(display_id)).unwrap();
let display = self.webvr_thread_receiver.recv().unwrap();
match display {
Some(display) => {
self.compositors.insert(display_id, display);
},
None => {
error!("VRDisplay not found when creating a new VRCompositor");
}
};
}
// This is only done on a per-platform basis, at initialization.
pub fn set_webvr_thread_sender(&mut self, sender: IpcSender<WebVRMsg>) {
self.webvr_thread_sender = Some(sender);
}
}
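As a sketch of how these pieces fit together (not part of the patch): once a display is presenting, WebRender drives the handler above once per frame, roughly in the order below. The helper function and its parameters are hypothetical, and it assumes a Create(compositor_id) command has already registered the display with the handler.
fn present_one_frame(handler: &mut WebVRCompositorHandler,
                     compositor_id: webrender_traits::VRCompositorId,
                     texture_id: u32,
                     layer: VRLayer) {
    use webrender_traits::{VRCompositorCommand, VRCompositorHandler};

    // Wait for the HMD vsync and fetch the synced frame data (pose and view matrices),
    // serialized to bytes so it can cross the IPC boundary back to script.
    // 0.01 and 10000.0 are the depthNear/depthFar defaults from the WebIDL.
    let (pose_sender, pose_receiver) = ipc::channel().unwrap();
    handler.handle(VRCompositorCommand::SyncPoses(compositor_id, 0.01, 10000.0, pose_sender), None);
    let _frame_data = pose_receiver.recv().unwrap();

    // Hand the rendered eye texture to the headset. WebRender supplies the actual texture id,
    // while the layer bounds come from the VRLayer requested by script.
    handler.handle(VRCompositorCommand::SubmitFrame(compositor_id,
                                                    layer.left_bounds,
                                                    layer.right_bounds),
                   Some(texture_id));
}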

View file

@ -0,0 +1,17 @@
[package]
name = "webvr_traits"
version = "0.0.1"
authors = ["The Servo Project Developers"]
license = "MPL-2.0"
publish = false
[lib]
name = "webvr_traits"
path = "lib.rs"
[dependencies]
ipc-channel = "0.5"
msg = {path = "../msg"}
serde = "0.8"
serde_derive = "0.8"
rust-webvr = {version = "0.1", features = ["serde-serialization"]}

View file

@ -0,0 +1,29 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(custom_derive)]
#![feature(plugin)]
#![deny(unsafe_code)]
extern crate ipc_channel;
extern crate msg;
extern crate serde;
#[macro_use]
extern crate serde_derive;
pub extern crate rust_webvr as webvr;
mod webvr_traits;
pub use webvr::VRDisplayData as WebVRDisplayData;
pub use webvr::VRDisplayCapabilities as WebVRDisplayCapabilities;
pub use webvr::VRDisplayEvent as WebVRDisplayEvent;
pub use webvr::VRDisplayEventReason as WebVRDisplayEventReason;
pub use webvr::VREye as WebVREye;
pub use webvr::VREyeParameters as WebVREyeParameters;
pub use webvr::VRFieldOfView as WebVRFieldOfView;
pub use webvr::VRFrameData as WebVRFrameData;
pub use webvr::VRLayer as WebVRLayer;
pub use webvr::VRPose as WebVRPose;
pub use webvr::VRStageParameters as WebVRStageParameters;
pub use webvr_traits::{WebVRMsg, WebVRResult};

View file

@ -0,0 +1,24 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use webvr::*;
pub type WebVRResult<T> = Result<T, String>;
// Messages from Script thread to WebVR thread.
#[derive(Deserialize, Serialize)]
pub enum WebVRMsg {
RegisterContext(PipelineId),
UnregisterContext(PipelineId),
PollEvents(IpcSender<bool>),
GetDisplays(IpcSender<WebVRResult<Vec<VRDisplayData>>>),
GetFrameData(PipelineId, u64, f64, f64, IpcSender<WebVRResult<VRFrameData>>),
ResetPose(PipelineId, u64, IpcSender<WebVRResult<VRDisplayData>>),
RequestPresent(PipelineId, u64, IpcSender<WebVRResult<()>>),
ExitPresent(PipelineId, u64, Option<IpcSender<WebVRResult<()>>>),
CreateCompositor(u64),
Exit,
}