WebVR API Implementation, r=larsbergstrom

Imanol Fernandez 2016-12-16 18:39:35 +01:00
parent 13826970c4
commit c5705bff50
70 changed files with 13044 additions and 20 deletions

Cargo.lock generated
View file

@ -1,24 +1,12 @@
[root]
-name = "webdriver_server"
+name = "webvr_traits"
version = "0.0.1"
dependencies = [
-"cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"hyper 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
-"image 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"msg 0.0.1",
-"net_traits 0.0.1",
-"plugins 0.0.1",
-"regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-"script_traits 0.0.1",
-"servo_config 0.0.1",
-"servo_url 0.0.1",
-"url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"webdriver 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"rust-webvr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_derive 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -445,6 +433,7 @@ dependencies = [
"servo_url 0.0.1",
"style_traits 0.0.1",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
"webvr_traits 0.0.1",
]
[[package]]
@ -1500,6 +1489,8 @@ dependencies = [
"webdriver_server 0.0.1",
"webrender 0.11.0 (git+https://github.com/servo/webrender)",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
"webvr 0.0.1",
"webvr_traits 0.0.1",
]
[[package]]
@ -2218,6 +2209,18 @@ name = "regex-syntax"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rust-webvr"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libloading 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-demangle"
version = "0.1.3"
@ -2308,6 +2311,8 @@ dependencies = [
"uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
"websocket 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
"webvr 0.0.1",
"webvr_traits 0.0.1",
"xml5ever 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2379,6 +2384,7 @@ dependencies = [
"style_traits 0.0.1",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"webvr_traits 0.0.1",
]
[[package]]
@ -3178,6 +3184,29 @@ dependencies = [
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "webdriver_server"
version = "0.0.1"
dependencies = [
"cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
"image 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"msg 0.0.1",
"net_traits 0.0.1",
"plugins 0.0.1",
"regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"script_traits 0.0.1",
"servo_config 0.0.1",
"servo_url 0.0.1",
"url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"webdriver 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "webrender"
version = "0.11.0"
@ -3238,6 +3267,19 @@ dependencies = [
"url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "webvr"
version = "0.0.1"
dependencies = [
"ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"msg 0.0.1",
"script_traits 0.0.1",
"servo_config 0.0.1",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
"webvr_traits 0.0.1",
]
[[package]]
name = "winapi"
version = "0.2.8"
@ -3500,6 +3542,7 @@ dependencies = [
"checksum ref_slice 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "825740057197b7d43025e7faf6477eaabc03434e153233da02d1f44602f71527"
"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum rust-webvr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0f1c2770eade344950b6959fb7f4c658200a252a61f265b3487383b82fafe61e"
"checksum rustc-demangle 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1430d286cadb237c17c885e25447c982c97113926bb579f4379c0eca8d9586dc"
"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
"checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084"

View file

@ -211,7 +211,8 @@ impl<'a> CanvasPaintThread<'a> {
}
}
}
CanvasMsg::WebGL(_) => panic!("Wrong message sent to Canvas2D thread"),
CanvasMsg::WebGL(_) => panic!("Wrong WebGL message sent to Canvas2D thread"),
CanvasMsg::WebVR(_) => panic!("Wrong WebVR message sent to Canvas2D thread"),
}
}
}).expect("Thread spawning failed");

View file

@ -144,6 +144,18 @@ impl WebGLPaintThread {
}
}
fn handle_webvr_message(&self, message: webrender_traits::VRCompositorCommand) {
match self.data {
WebGLPaintTaskData::WebRender(ref api, id) => {
api.send_vr_compositor_command(id, message);
}
WebGLPaintTaskData::Readback(..) => {
error!("Webrender is required for WebVR implementation");
}
}
}
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
@ -190,6 +202,7 @@ impl WebGLPaintThread {
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
CanvasMsg::WebVR(message) => painter.handle_webvr_message(message)
}
}
}).expect("Thread spawning failed");

View file

@ -27,7 +27,7 @@ use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use std::default::Default;
use std::str::FromStr;
-use webrender_traits::{WebGLCommand, WebGLContextId};
+use webrender_traits::{WebGLCommand, WebGLContextId, VRCompositorCommand};
#[derive(Clone, Deserialize, Serialize)]
pub enum FillRule {
@ -42,6 +42,7 @@ pub enum CanvasMsg {
FromLayout(FromLayoutMsg),
FromScript(FromScriptMsg),
WebGL(WebGLCommand),
WebVR(VRCompositorCommand)
}
#[derive(Clone, Deserialize, Serialize)]
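As a usage sketch for the new variant (not part of this commit): a VR compositor command is wrapped in CanvasMsg::WebVR and sent over the same IPC channel the WebGL paint thread already listens on, which then forwards it to WebRender via handle_webvr_message above. The helper name below is hypothetical; CanvasMsg, VRCompositorCommand, and IpcSender come from the code in this commit.

use canvas_traits::CanvasMsg;
use ipc_channel::ipc::IpcSender;
use webrender_traits::VRCompositorCommand;

// Hypothetical helper: forward a VR compositor command to the WebGL
// paint thread over its existing CanvasMsg channel.
fn send_vr_command(renderer: &IpcSender<CanvasMsg>, command: VRCompositorCommand) {
    renderer.send(CanvasMsg::WebVR(command))
            .expect("WebGL paint thread is gone");
}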

View file

@ -259,4 +259,8 @@ impl Preferences {
pub fn extend(&self, extension: HashMap<String, Pref>) {
self.0.write().unwrap().extend(extension);
}
pub fn is_webvr_enabled(&self) -> bool {
self.get("dom.webvr.enabled").as_boolean().unwrap_or(false)
}
}
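A minimal sketch of gating VR setup on this pref; PREFS is the global used elsewhere in this commit, while maybe_init_webvr is a hypothetical caller added only for illustration.

use servo_config::prefs::PREFS;

// Hypothetical caller: only wire up the WebVR machinery when the
// "dom.webvr.enabled" pref is set to true.
fn maybe_init_webvr() {
    if PREFS.is_webvr_enabled() {
        // ... spawn the WebVR thread and hand its sender to the constellation ...
    }
}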

View file

@ -36,6 +36,7 @@ servo_config = {path = "../config", features = ["servo"]}
servo_rand = {path = "../rand"}
servo_remutex = {path = "../remutex"}
servo_url = {path = "../url", features = ["servo"]}
webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"

View file

@ -101,6 +101,7 @@ use script_traits::{LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, Scri
use script_traits::{LogEntry, ServiceWorkerMsg, webdriver_msg};
use script_traits::{MozBrowserErrorType, MozBrowserEvent, WebDriverCommandMsg, WindowSizeData};
use script_traits::{SWManagerMsg, ScopeThings, WindowSizeType};
use script_traits::WebVREventMsg;
use servo_config::opts;
use servo_config::prefs::PREFS;
use servo_rand::{Rng, SeedableRng, ServoRng, random};
@ -122,6 +123,7 @@ use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use timer_scheduler::TimerScheduler;
use webrender_traits;
use webvr_traits::WebVRMsg;
/// The `Constellation` itself. In the servo browser, there is one
/// constellation, which maintains all of the browser global data.
@ -280,6 +282,9 @@ pub struct Constellation<Message, LTF, STF> {
/// Phantom data that keeps the Rust type system happy.
phantom: PhantomData<(Message, LTF, STF)>,
/// A channel through which messages can be sent to the webvr thread.
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// State needed to construct a constellation.
@ -535,6 +540,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
info!("Using seed {} for random pipeline closure.", seed);
(rng, prob)
}),
webvr_thread: None
};
constellation.run();
@ -645,6 +651,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
prev_visibility: prev_visibility,
webrender_api_sender: self.webrender_api_sender.clone(),
is_private: is_private,
webvr_thread: self.webvr_thread.clone()
});
let pipeline = match result {
@ -879,6 +886,14 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
FromCompositorMsg::LogEntry(top_level_frame_id, thread_name, entry) => {
self.handle_log_entry(top_level_frame_id, thread_name, entry);
}
FromCompositorMsg::SetWebVRThread(webvr_thread) => {
assert!(self.webvr_thread.is_none());
self.webvr_thread = Some(webvr_thread);
}
FromCompositorMsg::WebVREvent(pipeline_ids, event) => {
debug!("constellation got WebVR event");
self.handle_webvr_event(pipeline_ids, event);
}
}
}
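For context, a hedged sketch of the registration handshake from the embedder side: create the WebVR IPC channel and hand its sender to the constellation with SetWebVRThread. Here send_to_constellation stands in for the real compositor-to-constellation channel, the FromCompositorMsg import path is assumed, and the thread that services the receiver is elided.

use ipc_channel::ipc::{self, IpcSender};
use webvr_traits::WebVRMsg;

// Hedged sketch; a dedicated thread would consume `receiver` and answer
// WebVRMsg requests (GetDisplays, RegisterContext, Exit, ...).
fn register_webvr<F: Fn(FromCompositorMsg)>(send_to_constellation: F) -> IpcSender<WebVRMsg> {
    let (sender, receiver) = ipc::channel::<WebVRMsg>().expect("ipc channel");
    let _ = receiver; // serviced by the (elided) WebVR thread
    send_to_constellation(FromCompositorMsg::SetWebVRThread(sender.clone()));
    sender
}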
@ -1186,6 +1201,13 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
if let Some(chan) = self.webvr_thread.as_ref() {
debug!("Exiting WebVR thread.");
if let Err(e) = chan.send(WebVRMsg::Exit) {
warn!("Exit WebVR thread failed ({})", e);
}
}
debug!("Exiting font cache thread.");
self.font_cache_thread.exit();
@ -1274,6 +1296,18 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
fn handle_webvr_event(&mut self, ids: Vec<PipelineId>, event: WebVREventMsg) {
for id in ids {
match self.pipelines.get_mut(&id) {
Some(ref pipeline) => {
// Notify script thread
let _ = pipeline.event_loop.send(ConstellationControlMsg::WebVREvent(id, event.clone()));
},
None => warn!("constellation got webvr event for dead pipeline")
}
}
}
fn handle_init_load(&mut self, url: ServoUrl) {
let window_size = self.window_size.visible_viewport;
let root_pipeline_id = PipelineId::new();

View file

@ -41,6 +41,7 @@ extern crate servo_remutex;
extern crate servo_url;
extern crate style_traits;
extern crate webrender_traits;
extern crate webvr_traits;
mod constellation;
mod event_loop;

View file

@ -36,6 +36,7 @@ use std::rc::Rc;
use std::sync::mpsc::Sender;
use style_traits::{PagePx, ViewportPx};
use webrender_traits;
use webvr_traits::WebVRMsg;
/// A `Pipeline` is the constellation's view of a `Document`. Each pipeline has an
/// event loop (executed by a script thread) and a layout thread. A script thread
@ -169,6 +170,8 @@ pub struct InitialPipelineState {
/// Whether this pipeline is considered private.
pub is_private: bool,
/// A channel to the webvr thread.
pub webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl Pipeline {
@ -268,6 +271,7 @@ impl Pipeline {
script_content_process_shutdown_chan: script_content_process_shutdown_chan,
script_content_process_shutdown_port: script_content_process_shutdown_port,
webrender_api_sender: state.webrender_api_sender,
webvr_thread: state.webvr_thread,
};
// Spawn the child process.
@ -470,6 +474,7 @@ pub struct UnprivilegedPipelineContent {
script_content_process_shutdown_chan: IpcSender<()>,
script_content_process_shutdown_port: IpcReceiver<()>,
webrender_api_sender: webrender_traits::RenderApiSender,
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl UnprivilegedPipelineContent {
@ -496,6 +501,7 @@ impl UnprivilegedPipelineContent {
window_size: self.window_size,
pipeline_namespace_id: self.pipeline_namespace_id,
content_process_shutdown_chan: self.script_content_process_shutdown_chan,
webvr_thread: self.webvr_thread
}, self.load_data.clone());
LTF::create(self.id,

View file

@ -151,6 +151,7 @@ impl Formattable for ProfilerCategory {
ProfilerCategory::ScriptServiceWorkerEvent => "Script Service Worker Event",
ProfilerCategory::ScriptEnterFullscreen => "Script Enter Fullscreen",
ProfilerCategory::ScriptExitFullscreen => "Script Exit Fullscreen",
ProfilerCategory::ScriptWebVREvent => "Script WebVR Event",
ProfilerCategory::ApplicationHeartbeat => "Application Heartbeat",
};
format!("{}{}", padding, name)

View file

@ -88,6 +88,7 @@ pub enum ProfilerCategory {
ScriptParseXML = 0x76,
ScriptEnterFullscreen = 0x77,
ScriptExitFullscreen = 0x78,
ScriptWebVREvent = 0x79,
ApplicationHeartbeat = 0x90,
}

View file

@ -82,6 +82,8 @@ url = {version = "1.2", features = ["heap_size", "query_encoding"]}
uuid = {version = "0.3.1", features = ["v4"]}
websocket = "0.17"
xml5ever = {version = "0.3.1", features = ["unstable"]}
webvr = {path = "../webvr"}
webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"

View file

@ -51,6 +51,9 @@ use js::jsapi::{JSObject, JSString, JS_GetArrayBufferViewType};
use js::jsapi::{JS_GetLatin1StringCharsAndLength, JS_GetObjectAsArrayBuffer, JS_GetObjectAsArrayBufferView};
use js::jsapi::{JS_GetReservedSlot, JS_GetTwoByteStringCharsAndLength};
use js::jsapi::{JS_IsArrayObject, JS_NewStringCopyN, JS_StringHasLatin1Chars};
use js::jsapi::{JS_NewFloat32Array, JS_NewFloat64Array};
use js::jsapi::{JS_NewInt8Array, JS_NewInt16Array, JS_NewInt32Array};
use js::jsapi::{JS_NewUint8Array, JS_NewUint16Array, JS_NewUint32Array};
use js::jsapi::{MutableHandleValue, Type};
use js::jsval::{ObjectValue, StringValue};
use js::rust::{ToString, get_object_class, is_dom_class, is_dom_object, maybe_wrap_value};
@ -463,6 +466,9 @@ pub unsafe trait ArrayBufferViewContents: Clone {
/// Check if the JS ArrayBufferView type is compatible with the implementor of the
/// trait
fn is_type_compatible(ty: Type) -> bool;
/// Creates a typed array
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject;
}
unsafe impl ArrayBufferViewContents for u8 {
@ -473,47 +479,79 @@ unsafe impl ArrayBufferViewContents for u8 {
_ => false,
}
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint8Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i8 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int8 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt8Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for u16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint16 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint16Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int16 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt16Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for u32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewUint32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for i32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewInt32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for f32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float32 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewFloat32Array(cx, num)
}
}
unsafe impl ArrayBufferViewContents for f64 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float64 as i32
}
unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
JS_NewFloat64Array(cx, num)
}
}
/// Returns a mutable slice of the Array Buffer View data, viewed as T, without
@ -595,3 +633,23 @@ pub unsafe fn is_array_like(cx: *mut JSContext, value: HandleValue) -> bool {
assert!(JS_IsArrayObject(cx, value, &mut result));
result
}
/// Creates a typed JS array from a Rust slice
pub unsafe fn slice_to_array_buffer_view<T>(cx: *mut JSContext, data: &[T]) -> *mut JSObject
where T: ArrayBufferViewContents
{
let js_object = T::new(cx, data.len() as u32);
assert!(!js_object.is_null());
update_array_buffer_view(js_object, data);
js_object
}
/// Updates a typed JS array from a Rust slice
pub unsafe fn update_array_buffer_view<T>(obj: *mut JSObject, data: &[T])
where T: ArrayBufferViewContents
{
let mut buffer = array_buffer_view_data(obj);
if let Some(ref mut buffer) = buffer {
ptr::copy_nonoverlapping(&data[0], &mut buffer[0], data.len())
}
}
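A short usage sketch for the two helpers above (not part of the commit): the caller must hold a valid JSContext, and both helpers are defined in this module.

use js::jsapi::{JSContext, JSObject};

// Hedged example: build a Float32Array from a Rust slice, then refresh
// its contents in place on a later frame without reallocating.
unsafe fn example_typed_array(cx: *mut JSContext) {
    let initial = [0.0f32, 1.0, 2.0, 3.0];
    let array: *mut JSObject = slice_to_array_buffer_view(cx, &initial);
    update_array_buffer_view(array, &[4.0f32, 5.0, 6.0, 7.0]);
}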

View file

@ -429,6 +429,15 @@ pub mod validation;
pub mod validitystate;
pub mod values;
pub mod virtualmethods;
pub mod vr;
pub mod vrdisplay;
pub mod vrdisplaycapabilities;
pub mod vrdisplayevent;
pub mod vreyeparameters;
pub mod vrfieldofview;
pub mod vrframedata;
pub mod vrpose;
pub mod vrstageparameters;
pub mod webgl_validations;
pub mod webglactiveinfo;
pub mod webglbuffer;

View file

@ -12,7 +12,9 @@ use dom::mimetypearray::MimeTypeArray;
use dom::navigatorinfo;
use dom::pluginarray::PluginArray;
use dom::serviceworkercontainer::ServiceWorkerContainer;
use dom::vr::VR;
use dom::window::Window;
use script_traits::WebVREventMsg;
#[dom_struct]
pub struct Navigator {
@ -21,6 +23,7 @@ pub struct Navigator {
plugins: MutNullableJS<PluginArray>,
mime_types: MutNullableJS<MimeTypeArray>,
service_worker: MutNullableJS<ServiceWorkerContainer>,
vr: MutNullableJS<VR>
}
impl Navigator {
@ -31,6 +34,7 @@ impl Navigator {
plugins: Default::default(),
mime_types: Default::default(),
service_worker: Default::default(),
vr: Default::default(),
}
}
@ -114,4 +118,16 @@ impl NavigatorMethods for Navigator {
true
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#interface-navigator
fn Vr(&self) -> Root<VR> {
self.vr.or_init(|| VR::new(&self.global()))
}
}
impl Navigator {
pub fn handle_webvr_event(&self, event: WebVREventMsg) {
self.vr.get().expect("Shouldn't arrive here with an empty VR instance")
.handle_webvr_event(event);
}
}
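For reference, a hedged sketch of the script-thread side of the event path: when the ConstellationControlMsg::WebVREvent sent by the constellation arrives, it would be routed to the Navigator of the target window roughly like this (the message-loop plumbing and the Window::Navigator accessor are assumed from context, not shown in this diff).

// Hedged sketch: route a WebVR event to navigator.vr. Events only reach
// pipelines that registered a VR context, so the VR instance is expected
// to exist (see handle_webvr_event above).
fn dispatch_webvr_event(window: &Window, event: WebVREventMsg) {
    window.Navigator().handle_webvr_event(event);
}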

components/script/dom/vr.rs Normal file
View file

@ -0,0 +1,160 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRBinding;
use dom::bindings::codegen::Bindings::VRBinding::VRMethods;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use dom::vrdisplay::VRDisplay;
use dom::vrdisplayevent::VRDisplayEvent;
use ipc_channel::ipc;
use ipc_channel::ipc::IpcSender;
use script_traits::WebVREventMsg;
use std::rc::Rc;
use webvr_traits::WebVRMsg;
use webvr_traits::webvr;
#[dom_struct]
pub struct VR {
eventtarget: EventTarget,
displays: DOMRefCell<Vec<JS<VRDisplay>>>
}
impl VR {
fn new_inherited() -> VR {
VR {
eventtarget: EventTarget::new_inherited(),
displays: DOMRefCell::new(Vec::new())
}
}
pub fn new(global: &GlobalScope) -> Root<VR> {
let root = reflect_dom_object(box VR::new_inherited(),
global,
VRBinding::Wrap);
root.register();
root
}
}
impl Drop for VR {
fn drop(&mut self) {
self.unregister();
}
}
impl VRMethods for VR {
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#interface-navigator
fn GetDisplays(&self) -> Rc<Promise> {
let promise = Promise::new(&self.global());
if let Some(webvr_thread) = self.webvr_thread() {
let (sender, receiver) = ipc::channel().unwrap();
webvr_thread.send(WebVRMsg::GetDisplays(sender)).unwrap();
match receiver.recv().unwrap() {
Ok(displays) => {
// Sync displays
for display in displays {
self.sync_display(&display);
}
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
return promise;
}
}
} else {
// WebVR spec: The Promise MUST be rejected if WebVR is not enabled/supported.
promise.reject_error(promise.global().get_cx(), Error::Security);
return promise;
}
// convert from JS to Root
let displays: Vec<Root<VRDisplay>> = self.displays.borrow().iter()
.map(|d| Root::from_ref(&**d))
.collect();
promise.resolve_native(promise.global().get_cx(), &displays);
promise
}
}
impl VR {
fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
self.global().as_window().webvr_thread()
}
fn find_display(&self, display_id: u64) -> Option<Root<VRDisplay>> {
self.displays.borrow()
.iter()
.find(|d| d.get_display_id() == display_id)
.map(|d| Root::from_ref(&**d))
}
fn register(&self) {
if let Some(webvr_thread) = self.webvr_thread() {
let msg = WebVRMsg::RegisterContext(self.global().pipeline_id());
webvr_thread.send(msg).unwrap();
}
}
fn unregister(&self) {
if let Some(webvr_thread) = self.webvr_thread() {
let msg = WebVRMsg::UnregisterContext(self.global().pipeline_id());
webvr_thread.send(msg).unwrap();
}
}
fn sync_display(&self, display: &webvr::VRDisplayData) -> Root<VRDisplay> {
if let Some(existing) = self.find_display(display.display_id) {
existing.update_display(&display);
existing
} else {
let root = VRDisplay::new(&self.global(), display.clone());
self.displays.borrow_mut().push(JS::from_ref(&*root));
root
}
}
pub fn handle_webvr_event(&self, event: WebVREventMsg) {
let WebVREventMsg::DisplayEvent(event) = event;
match &event {
&webvr::VRDisplayEvent::Connect(ref display) => {
let display = self.sync_display(&display);
display.handle_webvr_event(&event);
self.notify_event(&display, &event);
},
&webvr::VRDisplayEvent::Disconnect(id) => {
if let Some(display) = self.find_display(id) {
display.handle_webvr_event(&event);
self.notify_event(&display, &event);
}
},
&webvr::VRDisplayEvent::Activate(ref display, _) |
&webvr::VRDisplayEvent::Deactivate(ref display, _) |
&webvr::VRDisplayEvent::Blur(ref display) |
&webvr::VRDisplayEvent::Focus(ref display) |
&webvr::VRDisplayEvent::PresentChange(ref display, _) |
&webvr::VRDisplayEvent::Change(ref display) => {
let display = self.sync_display(&display);
display.handle_webvr_event(&event);
}
};
}
fn notify_event(&self, display: &VRDisplay, event: &webvr::VRDisplayEvent) {
let event = VRDisplayEvent::new_from_webvr(&self.global(), &display, &event);
event.upcast::<Event>().fire(self.upcast());
}
}

View file

@ -0,0 +1,607 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::CanvasMsg;
use core::ops::Deref;
use dom::bindings::callback::ExceptionHandling;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceBinding::PerformanceMethods;
use dom::bindings::codegen::Bindings::VRDisplayBinding;
use dom::bindings::codegen::Bindings::VRDisplayBinding::VRDisplayMethods;
use dom::bindings::codegen::Bindings::VRDisplayBinding::VREye;
use dom::bindings::codegen::Bindings::VRLayerBinding::VRLayer;
use dom::bindings::codegen::Bindings::WindowBinding::FrameRequestCallback;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutNullableJS, MutJS, Root};
use dom::bindings::num::Finite;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use dom::vrdisplaycapabilities::VRDisplayCapabilities;
use dom::vrdisplayevent::VRDisplayEvent;
use dom::vreyeparameters::VREyeParameters;
use dom::vrframedata::VRFrameData;
use dom::vrpose::VRPose;
use dom::vrstageparameters::VRStageParameters;
use dom::webglrenderingcontext::WebGLRenderingContext;
use ipc_channel::ipc;
use ipc_channel::ipc::{IpcSender, IpcReceiver};
use js::jsapi::JSContext;
use script_runtime::CommonScriptMsg;
use script_runtime::ScriptThreadEventCategory::WebVREvent;
use script_thread::Runnable;
use std::cell::Cell;
use std::mem;
use std::rc::Rc;
use std::sync::mpsc;
use std::thread;
use webrender_traits::VRCompositorCommand;
use webvr_traits::{WebVRDisplayData, WebVRDisplayEvent, WebVRFrameData, WebVRLayer, WebVRMsg};
#[dom_struct]
pub struct VRDisplay {
eventtarget: EventTarget,
#[ignore_heap_size_of = "Defined in rust-webvr"]
display: DOMRefCell<WebVRDisplayData>,
depth_near: Cell<f64>,
depth_far: Cell<f64>,
presenting: Cell<bool>,
left_eye_params: MutJS<VREyeParameters>,
right_eye_params: MutJS<VREyeParameters>,
capabilities: MutJS<VRDisplayCapabilities>,
stage_params: MutNullableJS<VRStageParameters>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
frame_data: DOMRefCell<WebVRFrameData>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
layer: DOMRefCell<WebVRLayer>,
layer_ctx: MutNullableJS<WebGLRenderingContext>,
#[ignore_heap_size_of = "Defined in rust-webvr"]
next_raf_id: Cell<u32>,
/// List of request animation frame callbacks
#[ignore_heap_size_of = "closures are hard"]
raf_callback_list: DOMRefCell<Vec<(u32, Option<Rc<FrameRequestCallback>>)>>,
// Compositor VRFrameData synchronization
frame_data_status: Cell<VRFrameDataStatus>,
#[ignore_heap_size_of = "channels are hard"]
frame_data_receiver: DOMRefCell<Option<IpcReceiver<Result<Vec<u8>, ()>>>>,
}
unsafe_no_jsmanaged_fields!(WebVRDisplayData);
unsafe_no_jsmanaged_fields!(WebVRFrameData);
unsafe_no_jsmanaged_fields!(WebVRLayer);
#[derive(Clone, Copy, PartialEq, Eq, HeapSizeOf)]
enum VRFrameDataStatus {
Waiting,
Synced,
Exit
}
unsafe_no_jsmanaged_fields!(VRFrameDataStatus);
impl VRDisplay {
fn new_inherited(global: &GlobalScope, display: WebVRDisplayData) -> VRDisplay {
let stage = match display.stage_parameters {
Some(ref params) => Some(VRStageParameters::new(params.clone(), &global)),
None => None
};
VRDisplay {
eventtarget: EventTarget::new_inherited(),
display: DOMRefCell::new(display.clone()),
depth_near: Cell::new(0.01),
depth_far: Cell::new(10000.0),
presenting: Cell::new(false),
left_eye_params: MutJS::new(&*VREyeParameters::new(display.left_eye_parameters.clone(), &global)),
right_eye_params: MutJS::new(&*VREyeParameters::new(display.right_eye_parameters.clone(), &global)),
capabilities: MutJS::new(&*VRDisplayCapabilities::new(display.capabilities.clone(), &global)),
stage_params: MutNullableJS::new(stage.as_ref().map(|v| v.deref())),
frame_data: DOMRefCell::new(Default::default()),
layer: DOMRefCell::new(Default::default()),
layer_ctx: MutNullableJS::default(),
next_raf_id: Cell::new(1),
raf_callback_list: DOMRefCell::new(vec![]),
frame_data_status: Cell::new(VRFrameDataStatus::Waiting),
frame_data_receiver: DOMRefCell::new(None),
}
}
pub fn new(global: &GlobalScope, display: WebVRDisplayData) -> Root<VRDisplay> {
reflect_dom_object(box VRDisplay::new_inherited(&global, display),
global,
VRDisplayBinding::Wrap)
}
}
impl Drop for VRDisplay {
fn drop(&mut self) {
if self.presenting.get() {
self.force_stop_present();
}
}
}
impl VRDisplayMethods for VRDisplay {
// https://w3c.github.io/webvr/#dom-vrdisplay-isconnected
fn IsConnected(&self) -> bool {
self.display.borrow().connected
}
// https://w3c.github.io/webvr/#dom-vrdisplay-ispresenting
fn IsPresenting(&self) -> bool {
self.presenting.get()
}
// https://w3c.github.io/webvr/#dom-vrdisplay-capabilities
fn Capabilities(&self) -> Root<VRDisplayCapabilities> {
Root::from_ref(&*self.capabilities.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-stageparameters
fn GetStageParameters(&self) -> Option<Root<VRStageParameters>> {
self.stage_params.get().map(|s| Root::from_ref(&*s))
}
// https://w3c.github.io/webvr/#dom-vrdisplay-geteyeparameters
fn GetEyeParameters(&self, eye: VREye) -> Root<VREyeParameters> {
match eye {
VREye::Left => Root::from_ref(&*self.left_eye_params.get()),
VREye::Right => Root::from_ref(&*self.right_eye_params.get())
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-displayid
fn DisplayId(&self) -> u32 {
self.display.borrow().display_id as u32
}
// https://w3c.github.io/webvr/#dom-vrdisplay-displayname
fn DisplayName(&self) -> DOMString {
DOMString::from(self.display.borrow().display_name.clone())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-getframedata-framedata-framedata
fn GetFrameData(&self, frameData: &VRFrameData) -> bool {
// If presenting, we use frame data synced with the compositor for the whole frame
if self.presenting.get() {
if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
self.sync_frame_data();
}
frameData.update(& self.frame_data.borrow());
return true;
}
// If not presenting, we fetch an immediate VRFrameData
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::GetFrameData(self.global().pipeline_id(),
self.get_display_id(),
self.depth_near.get(),
self.depth_far.get(),
sender)).unwrap();
return match receiver.recv().unwrap() {
Ok(data) => {
frameData.update(&data);
true
},
Err(e) => {
error!("WebVR::GetFrameData: {:?}", e);
false
}
};
}
// https://w3c.github.io/webvr/#dom-vrdisplay-getpose
fn GetPose(&self) -> Root<VRPose> {
VRPose::new(&self.global(), &self.frame_data.borrow().pose)
}
// https://w3c.github.io/webvr/#dom-vrdisplay-resetpose
fn ResetPose(&self) -> () {
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::ResetPose(self.global().pipeline_id(),
self.get_display_id(),
sender)).unwrap();
if let Ok(data) = receiver.recv().unwrap() {
// Some VRDisplay data might change after calling ResetPose()
*self.display.borrow_mut() = data;
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
fn DepthNear(&self) -> Finite<f64> {
Finite::wrap(self.depth_near.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
fn SetDepthNear(&self, value: Finite<f64>) -> () {
self.depth_near.set(*value.deref());
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
fn DepthFar(&self) -> Finite<f64> {
Finite::wrap(self.depth_far.get())
}
// https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
fn SetDepthFar(&self, value: Finite<f64>) -> () {
self.depth_far.set(*value.deref());
}
// https://w3c.github.io/webvr/#dom-vrdisplay-requestanimationframe
fn RequestAnimationFrame(&self, callback: Rc<FrameRequestCallback>) -> u32 {
if self.presenting.get() {
let raf_id = self.next_raf_id.get();
self.next_raf_id.set(raf_id + 1);
self.raf_callback_list.borrow_mut().push((raf_id, Some(callback)));
raf_id
} else {
// WebVR spec: When a VRDisplay is not presenting it should
// fall back to window.requestAnimationFrame.
self.global().as_window().RequestAnimationFrame(callback)
}
}
// https://w3c.github.io/webvr/#dom-vrdisplay-cancelanimationframe
fn CancelAnimationFrame(&self, handle: u32) -> () {
if self.presenting.get() {
let mut list = self.raf_callback_list.borrow_mut();
if let Some(mut pair) = list.iter_mut().find(|pair| pair.0 == handle) {
pair.1 = None;
}
} else {
// WebVR spec: When a VRDisplay is not presenting it should
// fall back to window.cancelAnimationFrame.
self.global().as_window().CancelAnimationFrame(handle);
}
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#dom-vrdisplay-requestpresent
fn RequestPresent(&self, layers: Vec<VRLayer>) -> Rc<Promise> {
let promise = Promise::new(&self.global());
// TODO: WebVR spec: this method must be called in response to a user gesture
// WebVR spec: If canPresent is false the promise MUST be rejected
if !self.display.borrow().capabilities.can_present {
let msg = "VRDisplay canPresent is false".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// The current WebVR spec only allows 1 VRLayer if the VRDevice can present.
// Future revisions of this spec may allow multiple layers to enable more complex rendering effects
// such as compositing WebGL and DOM elements together.
// That functionality is not allowed by this revision of the spec.
if layers.len() != 1 {
let msg = "The number of layers must be 1".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// Parse and validate received VRLayer
let layer = validate_layer(self.global().get_cx(), &layers[0]);
let layer_bounds;
let layer_ctx;
match layer {
Ok((bounds, ctx)) => {
layer_bounds = bounds;
layer_ctx = ctx;
},
Err(msg) => {
let msg = msg.to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
};
// WebVR spec: Repeat calls while already presenting will update the VRLayers being displayed.
if self.presenting.get() {
*self.layer.borrow_mut() = layer_bounds;
self.layer_ctx.set(Some(&layer_ctx));
promise.resolve_native(promise.global().get_cx(), &());
return promise;
}
// Request Present
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::RequestPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
sender))
.unwrap();
match receiver.recv().unwrap() {
Ok(()) => {
*self.layer.borrow_mut() = layer_bounds;
self.layer_ctx.set(Some(&layer_ctx));
self.init_present();
promise.resolve_native(promise.global().get_cx(), &());
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
}
}
promise
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/webvr/#dom-vrdisplay-exitpresent
fn ExitPresent(&self) -> Rc<Promise> {
let promise = Promise::new(&self.global());
// WebVR spec: If the VRDisplay is not presenting the promise MUST be rejected.
if !self.presenting.get() {
let msg = "VRDisplay is not presenting".to_string();
promise.reject_native(promise.global().get_cx(), &msg);
return promise;
}
// Exit present
let (sender, receiver) = ipc::channel().unwrap();
self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
Some(sender)))
.unwrap();
match receiver.recv().unwrap() {
Ok(()) => {
self.stop_present();
promise.resolve_native(promise.global().get_cx(), &());
},
Err(e) => {
promise.reject_native(promise.global().get_cx(), &e);
}
}
promise
}
// https://w3c.github.io/webvr/#dom-vrdisplay-submitframe
fn SubmitFrame(&self) -> () {
if !self.presenting.get() {
warn!("VRDisplay not presenting");
return;
}
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let display_id = self.display.borrow().display_id;
let layer = self.layer.borrow();
let msg = VRCompositorCommand::SubmitFrame(display_id, layer.left_bounds, layer.right_bounds);
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
}
}
impl VRDisplay {
fn webvr_thread(&self) -> IpcSender<WebVRMsg> {
self.global().as_window().webvr_thread().expect("Shouldn't arrive here with WebVR disabled")
}
pub fn get_display_id(&self) -> u64 {
self.display.borrow().display_id
}
pub fn update_display(&self, display: &WebVRDisplayData) {
*self.display.borrow_mut() = display.clone();
if let Some(ref stage) = display.stage_parameters {
if self.stage_params.get().is_none() {
let params = Some(VRStageParameters::new(stage.clone(), &self.global()));
self.stage_params.set(params.as_ref().map(|v| v.deref()));
} else {
self.stage_params.get().unwrap().update(&stage);
}
} else {
self.stage_params.set(None);
}
}
pub fn handle_webvr_event(&self, event: &WebVRDisplayEvent) {
match *event {
WebVRDisplayEvent::Connect(ref display) => {
self.update_display(&display);
},
WebVRDisplayEvent::Disconnect(_id) => {
self.display.borrow_mut().connected = false;
},
WebVRDisplayEvent::Activate(ref display, _) |
WebVRDisplayEvent::Deactivate(ref display, _) |
WebVRDisplayEvent::Blur(ref display) |
WebVRDisplayEvent::Focus(ref display) => {
self.update_display(&display);
self.notify_event(&event);
},
WebVRDisplayEvent::PresentChange(ref display, presenting) => {
self.update_display(&display);
self.presenting.set(presenting);
self.notify_event(&event);
},
WebVRDisplayEvent::Change(ref display) => {
// The Change event doesn't exist in the WebVR spec,
// so we update the display data but don't notify JS.
self.update_display(&display);
}
};
}
fn notify_event(&self, event: &WebVRDisplayEvent) {
let root = Root::from_ref(&*self);
let event = VRDisplayEvent::new_from_webvr(&self.global(), &root, &event);
event.upcast::<Event>().fire(self.upcast());
}
fn init_present(&self) {
self.presenting.set(true);
let (sync_sender, sync_receiver) = ipc::channel().unwrap();
*self.frame_data_receiver.borrow_mut() = Some(sync_receiver);
let display_id = self.display.borrow().display_id;
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let js_sender = self.global().script_chan();
let address = Trusted::new(&*self);
let near_init = self.depth_near.get();
let far_init = self.depth_far.get();
// The render loop at the native headset frame rate is implemented using a dedicated thread.
// Every loop iteration syncs pose data with the HMD, submits the pixels to the display, and waits for VSync.
// Both the requestAnimationFrame call of a VRDisplay in the JavaScript thread and the SyncPoses call
// in the WebRender thread are executed in parallel. This allows some JavaScript code to run ahead
// while the render thread is syncing the VRFrameData to be used for the current frame.
// This thread runs until the user calls ExitPresent, the tab is closed, or some unexpected error occurs.
thread::Builder::new().name("WebVR_RAF".into()).spawn(move || {
let (raf_sender, raf_receiver) = mpsc::channel();
let mut near = near_init;
let mut far = far_init;
// Initialize compositor
api_sender.send(CanvasMsg::WebVR(VRCompositorCommand::Create(display_id))).unwrap();
loop {
// Run RAF callbacks on JavaScript thread
let msg = box NotifyDisplayRAF {
address: address.clone(),
sender: raf_sender.clone()
};
js_sender.send(CommonScriptMsg::RunnableMsg(WebVREvent, msg)).unwrap();
// Run SyncPoses in parallel on the render thread
let msg = VRCompositorCommand::SyncPoses(display_id, near, far, sync_sender.clone());
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
// Wait until both SyncPoses & RAF ends
if let Ok(depth) = raf_receiver.recv().unwrap() {
near = depth.0;
far = depth.1;
} else {
// Stop the thread:
// ExitPresent was called or an error occurred
return;
}
}
}).expect("Thread spawning failed");
}
fn stop_present(&self) {
self.presenting.set(false);
*self.frame_data_receiver.borrow_mut() = None;
let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
let display_id = self.display.borrow().display_id;
let msg = VRCompositorCommand::Release(display_id);
api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
}
// Only called when the JSContext is destroyed while presenting.
// In this case we don't want to wait for the WebVR thread's response.
fn force_stop_present(&self) {
self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
self.display.borrow().display_id,
None))
.unwrap();
self.stop_present();
}
fn sync_frame_data(&self) {
let status = if let Some(receiver) = self.frame_data_receiver.borrow().as_ref() {
match receiver.recv().unwrap() {
Ok(bytes) => {
*self.frame_data.borrow_mut() = WebVRFrameData::from_bytes(&bytes[..]);
VRFrameDataStatus::Synced
},
Err(()) => {
VRFrameDataStatus::Exit
}
}
} else {
VRFrameDataStatus::Exit
};
self.frame_data_status.set(status);
}
fn handle_raf(&self, end_sender: &mpsc::Sender<Result<(f64, f64), ()>>) {
self.frame_data_status.set(VRFrameDataStatus::Waiting);
let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]);
let now = self.global().as_window().Performance().Now();
// Call registered VRDisplay.requestAnimationFrame callbacks.
for (_, callback) in callbacks.drain(..) {
if let Some(callback) = callback {
let _ = callback.Call__(Finite::wrap(*now), ExceptionHandling::Report);
}
}
if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
// The user didn't call getFrameData while presenting.
// We automatically read the pending VRFrameData to avoid overflowing the IPC channel buffers.
// Show a warning, as the WebVR spec recommends.
warn!("WebVR: You should call GetFrameData while presenting");
self.sync_frame_data();
}
match self.frame_data_status.get() {
VRFrameDataStatus::Synced => {
// Sync succeeded. Notify RAF thread.
end_sender.send(Ok((self.depth_near.get(), self.depth_far.get()))).unwrap();
},
VRFrameDataStatus::Exit | VRFrameDataStatus::Waiting => {
// ExitPresent was called or some error occurred.
// Notify VRDisplay RAF thread to stop.
end_sender.send(Err(())).unwrap();
}
}
}
}
struct NotifyDisplayRAF {
address: Trusted<VRDisplay>,
sender: mpsc::Sender<Result<(f64, f64), ()>>
}
impl Runnable for NotifyDisplayRAF {
fn name(&self) -> &'static str { "NotifyDisplayRAF" }
fn handler(self: Box<Self>) {
let display = self.address.root();
display.handle_raf(&self.sender);
}
}
// WebVR spec: If the number of values in the leftBounds/rightBounds arrays
// is not 0 or 4 for any of the passed layers, the promise is rejected.
fn parse_bounds(src: &Option<Vec<Finite<f32>>>, dst: &mut [f32; 4]) -> Result<(), &'static str> {
match *src {
Some(ref values) => {
if values.len() == 0 {
return Ok(())
}
if values.len() != 4 {
return Err("The number of values in the leftBounds/rightBounds arrays must be 0 or 4")
}
for i in 0..4 {
dst[i] = *values[i].deref();
}
Ok(())
},
None => Ok(())
}
}
fn validate_layer(cx: *mut JSContext,
layer: &VRLayer)
-> Result<(WebVRLayer, Root<WebGLRenderingContext>), &'static str> {
let ctx = layer.source.as_ref().map(|ref s| s.get_or_init_webgl_context(cx, None)).unwrap_or(None);
if let Some(ctx) = ctx {
let mut data = WebVRLayer::default();
try!(parse_bounds(&layer.leftBounds, &mut data.left_bounds));
try!(parse_bounds(&layer.rightBounds, &mut data.right_bounds));
Ok((data, ctx))
} else {
Err("VRLayer source must be a WebGL Context")
}
}

View file

@ -0,0 +1,62 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding;
use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding::VRDisplayCapabilitiesMethods;
use dom::bindings::js::Root;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use webvr_traits::WebVRDisplayCapabilities;
#[dom_struct]
pub struct VRDisplayCapabilities {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
capabilities: DOMRefCell<WebVRDisplayCapabilities>
}
unsafe_no_jsmanaged_fields!(WebVRDisplayCapabilities);
impl VRDisplayCapabilities {
fn new_inherited(capabilities: WebVRDisplayCapabilities) -> VRDisplayCapabilities {
VRDisplayCapabilities {
reflector_: Reflector::new(),
capabilities: DOMRefCell::new(capabilities)
}
}
pub fn new(capabilities: WebVRDisplayCapabilities, global: &GlobalScope) -> Root<VRDisplayCapabilities> {
reflect_dom_object(box VRDisplayCapabilities::new_inherited(capabilities),
global,
VRDisplayCapabilitiesBinding::Wrap)
}
}
impl VRDisplayCapabilitiesMethods for VRDisplayCapabilities {
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasposition
fn HasPosition(&self) -> bool {
self.capabilities.borrow().has_position
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasorientation
fn HasOrientation(&self) -> bool {
self.capabilities.borrow().has_orientation
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasexternaldisplay
fn HasExternalDisplay(&self) -> bool {
self.capabilities.borrow().has_external_display
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-canpresent
fn CanPresent(&self) -> bool {
self.capabilities.borrow().can_present
}
// https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-maxlayers
fn MaxLayers(&self) -> u32 {
if self.CanPresent() { 1 } else { 0 }
}
}

View file

@ -0,0 +1,116 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventMethods;
use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventReason;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::globalscope::GlobalScope;
use dom::vrdisplay::VRDisplay;
use dom::window::Window;
use servo_atoms::Atom;
use webvr_traits::{WebVRDisplayEvent, WebVRDisplayEventReason};
#[dom_struct]
pub struct VRDisplayEvent {
event: Event,
display: JS<VRDisplay>,
reason: Option<VRDisplayEventReason>
}
impl VRDisplayEvent {
fn new_inherited(display: &VRDisplay,
reason: Option<VRDisplayEventReason>)
-> VRDisplayEvent {
VRDisplayEvent {
event: Event::new_inherited(),
display: JS::from_ref(display),
reason: reason.clone()
}
}
pub fn new(global: &GlobalScope,
type_: Atom,
bubbles: bool,
cancelable: bool,
display: &VRDisplay,
reason: Option<VRDisplayEventReason>)
-> Root<VRDisplayEvent> {
let ev = reflect_dom_object(box VRDisplayEvent::new_inherited(&display, reason),
global,
VRDisplayEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
pub fn new_from_webvr(global: &GlobalScope,
display: &VRDisplay,
event: &WebVRDisplayEvent)
-> Root<VRDisplayEvent> {
let (name, reason) = match *event {
WebVRDisplayEvent::Connect(_) => ("displayconnect", None),
WebVRDisplayEvent::Disconnect(_) => ("displaydisconnect", None),
WebVRDisplayEvent::Activate(_, reason) => ("activate", Some(reason)),
WebVRDisplayEvent::Deactivate(_, reason) => ("deactivate", Some(reason)),
WebVRDisplayEvent::Blur(_) => ("blur", None),
WebVRDisplayEvent::Focus(_) => ("focus", None),
WebVRDisplayEvent::PresentChange(_, _) => ("presentchange", None),
WebVRDisplayEvent::Change(_) => panic!("VRDisplayEvent:Change event not available in WebVR")
};
// map to JS enum values
let reason = reason.map(|r| {
match r {
WebVRDisplayEventReason::Navigation => VRDisplayEventReason::Navigation,
WebVRDisplayEventReason::Mounted => VRDisplayEventReason::Mounted,
WebVRDisplayEventReason::Unmounted => VRDisplayEventReason::Unmounted,
}
});
VRDisplayEvent::new(&global,
Atom::from(DOMString::from(name)),
false,
false,
&display,
reason)
}
pub fn Constructor(window: &Window,
type_: DOMString,
init: &VRDisplayEventBinding::VRDisplayEventInit)
-> Fallible<Root<VRDisplayEvent>> {
Ok(VRDisplayEvent::new(&window.global(),
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,
&init.display,
init.reason))
}
}
impl VRDisplayEventMethods for VRDisplayEvent {
// https://w3c.github.io/webvr/#dom-vrdisplayevent-display
fn Display(&self) -> Root<VRDisplay> {
Root::from_ref(&*self.display)
}
// https://w3c.github.io/webvr/#enumdef-vrdisplayeventreason
fn GetReason(&self) -> Option<VRDisplayEventReason> {
self.reason
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}

View file

@ -0,0 +1,75 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VREyeParametersBinding;
use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
use dom::bindings::conversions::slice_to_array_buffer_view;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrfieldofview::VRFieldOfView;
use js::jsapi::{Heap, JSContext, JSObject};
use std::default::Default;
use webvr_traits::WebVREyeParameters;
#[dom_struct]
pub struct VREyeParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVREyeParameters>,
offset: Heap<*mut JSObject>,
fov: JS<VRFieldOfView>,
}
unsafe_no_jsmanaged_fields!(WebVREyeParameters);
impl VREyeParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
let mut result = VREyeParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
offset: Heap::default(),
fov: JS::from_ref(&*fov)
};
unsafe {
result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
}
result
}
pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters> {
reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
global,
VREyeParametersBinding::Wrap)
}
}
impl VREyeParametersMethods for VREyeParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.offset.get())
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
fn FieldOfView(&self) -> Root<VRFieldOfView> {
Root::from_ref(&*self.fov)
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
fn RenderWidth(&self) -> u32 {
self.parameters.borrow().render_width
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
fn RenderHeight(&self) -> u32 {
self.parameters.borrow().render_height
}
}

View file

@ -0,0 +1,58 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRFieldOfViewBinding;
use dom::bindings::codegen::Bindings::VRFieldOfViewBinding::VRFieldOfViewMethods;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use webvr_traits::WebVRFieldOfView;
#[dom_struct]
pub struct VRFieldOfView {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
fov: DOMRefCell<WebVRFieldOfView>
}
unsafe_no_jsmanaged_fields!(WebVRFieldOfView);
impl VRFieldOfView {
fn new_inherited(fov: WebVRFieldOfView) -> VRFieldOfView {
VRFieldOfView {
reflector_: Reflector::new(),
fov: DOMRefCell::new(fov)
}
}
pub fn new(global: &GlobalScope, fov: WebVRFieldOfView) -> Root<VRFieldOfView> {
reflect_dom_object(box VRFieldOfView::new_inherited(fov),
global,
VRFieldOfViewBinding::Wrap)
}
}
impl VRFieldOfViewMethods for VRFieldOfView {
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn UpDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().up_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn RightDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().right_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn DownDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().down_degrees)
}
// https://w3c.github.io/webvr/#interface-interface-vrfieldofview
fn LeftDegrees(&self) -> Finite<f64> {
Finite::wrap(self.fov.borrow().left_degrees)
}
}

View file

@ -0,0 +1,122 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::codegen::Bindings::VRFrameDataBinding;
use dom::bindings::codegen::Bindings::VRFrameDataBinding::VRFrameDataMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::error::Fallible;
use dom::bindings::js::{JS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrpose::VRPose;
use dom::window::Window;
use js::jsapi::{Heap, JSContext, JSObject};
use std::cell::Cell;
use webvr_traits::WebVRFrameData;
#[dom_struct]
pub struct VRFrameData {
reflector_: Reflector,
left_proj: Heap<*mut JSObject>,
left_view: Heap<*mut JSObject>,
right_proj: Heap<*mut JSObject>,
right_view: Heap<*mut JSObject>,
pose: JS<VRPose>,
timestamp: Cell<f64>,
first_timestamp: Cell<f64>
}
impl VRFrameData {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new(global: &GlobalScope) -> Root<VRFrameData> {
let matrix = [1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0f32];
let pose = VRPose::new(&global, &Default::default());
let mut framedata = VRFrameData {
reflector_: Reflector::new(),
left_proj: Heap::default(),
left_view: Heap::default(),
right_proj: Heap::default(),
right_view: Heap::default(),
pose: JS::from_ref(&*pose),
timestamp: Cell::new(0.0),
first_timestamp: Cell::new(0.0)
};
unsafe {
framedata.left_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.left_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.right_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
framedata.right_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
}
reflect_dom_object(box framedata,
global,
VRFrameDataBinding::Wrap)
}
pub fn Constructor(window: &Window) -> Fallible<Root<VRFrameData>> {
Ok(VRFrameData::new(&window.global()))
}
}
impl VRFrameData {
#[allow(unsafe_code)]
pub fn update(&self, data: &WebVRFrameData) {
unsafe {
update_array_buffer_view(self.left_proj.get(), &data.left_projection_matrix);
update_array_buffer_view(self.left_view.get(), &data.left_view_matrix);
update_array_buffer_view(self.right_proj.get(), &data.right_projection_matrix);
update_array_buffer_view(self.right_view.get(), &data.right_view_matrix);
}
self.pose.update(&data.pose);
self.timestamp.set(data.timestamp);
if self.first_timestamp.get() == 0.0 {
self.first_timestamp.set(data.timestamp);
}
}
}
impl VRFrameDataMethods for VRFrameData {
// https://w3c.github.io/webvr/#dom-vrframedata-timestamp
fn Timestamp(&self) -> Finite<f64> {
Finite::wrap(self.timestamp.get() - self.first_timestamp.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-leftprojectionmatrix
unsafe fn LeftProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.left_proj.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-leftviewmatrix
unsafe fn LeftViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.left_view.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-rightprojectionmatrix
unsafe fn RightProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.right_proj.get())
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrframedata-rightviewmatrix
unsafe fn RightViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.right_view.get())
}
// https://w3c.github.io/webvr/#dom-vrframedata-pose
fn Pose(&self) -> Root<VRPose> {
Root::from_ref(&*self.pose)
}
}

View file

@ -0,0 +1,133 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRPoseBinding;
use dom::bindings::codegen::Bindings::VRPoseBinding::VRPoseMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::js::Root;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use js::jsapi::{Heap, JSContext, JSObject};
use std::ptr;
use webvr_traits::webvr;
#[dom_struct]
pub struct VRPose {
reflector_: Reflector,
position: DOMRefCell<Heap<*mut JSObject>>,
orientation: DOMRefCell<Heap<*mut JSObject>>,
linear_vel: DOMRefCell<Heap<*mut JSObject>>,
angular_vel: DOMRefCell<Heap<*mut JSObject>>,
linear_acc: DOMRefCell<Heap<*mut JSObject>>,
angular_acc: DOMRefCell<Heap<*mut JSObject>>
}
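/// Lazily creates the backing Float32Array when the native pose provides a value,
/// updates it in place on subsequent frames, and clears it to null when the value
/// is no longer available (e.g. a headset that loses position tracking).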
#[allow(unsafe_code)]
unsafe fn update_or_create_typed_array(cx: *mut JSContext,
src: Option<&[f32]>,
dst: &DOMRefCell<Heap<*mut JSObject>>) {
let mut dst = dst.borrow_mut();
match src {
Some(ref data) => {
if dst.get().is_null() {
dst.set(slice_to_array_buffer_view(cx, &data));
} else {
update_array_buffer_view(dst.get(), &data);
}
},
None => {
if !dst.get().is_null() {
dst.set(ptr::null_mut());
}
}
}
}
#[inline]
#[allow(unsafe_code)]
fn heap_to_option(heap: &DOMRefCell<Heap<*mut JSObject>>) -> Option<NonZero<*mut JSObject>> {
let js_object = heap.borrow_mut().get();
if js_object.is_null() {
None
} else {
unsafe {
Some(NonZero::new(js_object))
}
}
}
impl VRPose {
fn new_inherited() -> VRPose {
VRPose {
reflector_: Reflector::new(),
position: DOMRefCell::new(Heap::default()),
orientation: DOMRefCell::new(Heap::default()),
linear_vel: DOMRefCell::new(Heap::default()),
angular_vel: DOMRefCell::new(Heap::default()),
linear_acc: DOMRefCell::new(Heap::default()),
angular_acc: DOMRefCell::new(Heap::default())
}
}
pub fn new(global: &GlobalScope, pose: &webvr::VRPose) -> Root<VRPose> {
let root = reflect_dom_object(box VRPose::new_inherited(),
global,
VRPoseBinding::Wrap);
root.update(&pose);
root
}
#[allow(unsafe_code)]
pub fn update(&self, pose: &webvr::VRPose) {
let cx = self.global().get_cx();
unsafe {
update_or_create_typed_array(cx, pose.position.as_ref().map(|v| &v[..]), &self.position);
update_or_create_typed_array(cx, pose.orientation.as_ref().map(|v| &v[..]), &self.orientation);
update_or_create_typed_array(cx, pose.linear_velocity.as_ref().map(|v| &v[..]), &self.linear_vel);
update_or_create_typed_array(cx, pose.angular_velocity.as_ref().map(|v| &v[..]), &self.angular_vel);
update_or_create_typed_array(cx, pose.linear_acceleration.as_ref().map(|v| &v[..]), &self.linear_acc);
update_or_create_typed_array(cx, pose.angular_acceleration.as_ref().map(|v| &v[..]), &self.angular_acc);
}
}
}
impl VRPoseMethods for VRPose {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-position
unsafe fn GetPosition(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.position)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearvelocity
unsafe fn GetLinearVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearacceleration
unsafe fn GetLinearAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_acc)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-orientation
unsafe fn GetOrientation(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.orientation)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularvelocity
unsafe fn GetAngularVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularacceleration
unsafe fn GetAngularAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_acc)
}
}

View file

@ -0,0 +1,75 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRStageParametersBinding;
use dom::bindings::codegen::Bindings::VRStageParametersBinding::VRStageParametersMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use js::jsapi::{Heap, JSContext, JSObject};
use webvr_traits::WebVRStageParameters;
#[dom_struct]
pub struct VRStageParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVRStageParameters>,
transform: Heap<*mut JSObject>,
}
unsafe_no_jsmanaged_fields!(WebVRStageParameters);
impl VRStageParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVRStageParameters, global: &GlobalScope) -> VRStageParameters {
let mut stage = VRStageParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
transform: Heap::default()
};
unsafe {
stage.transform.set(slice_to_array_buffer_view(global.get_cx(),
&stage.parameters.borrow().sitting_to_standing_transform));
}
stage
}
pub fn new(parameters: WebVRStageParameters, global: &GlobalScope) -> Root<VRStageParameters> {
reflect_dom_object(box VRStageParameters::new_inherited(parameters, global),
global,
VRStageParametersBinding::Wrap)
}
#[allow(unsafe_code)]
pub fn update(&self, parameters: &WebVRStageParameters) {
unsafe {
update_array_buffer_view(self.transform.get(), &parameters.sitting_to_standing_transform);
}
*self.parameters.borrow_mut() = parameters.clone();
}
}
impl VRStageParametersMethods for VRStageParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrstageparameters-sittingtostandingtransform
unsafe fn SittingToStandingTransform(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.transform.get())
}
// https://w3c.github.io/webvr/#dom-vrstageparameters-sizex
fn SizeX(&self) -> Finite<f32> {
Finite::wrap(self.parameters.borrow().size_x)
}
// https://w3c.github.io/webvr/#dom-vrstageparameters-sizez
fn SizeZ(&self) -> Finite<f32> {
Finite::wrap(self.parameters.borrow().size_z)
}
}

View file

@ -57,3 +57,8 @@ interface NavigatorPlugins {
interface NavigatorCookies {
readonly attribute boolean cookieEnabled;
};
// https://w3c.github.io/webvr/#interface-navigator
partial interface Navigator {
[SameObject, Pref="dom.webvr.enabled"] readonly attribute VR vr;
};

View file

@ -0,0 +1,10 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-navigator
[Pref="dom.webvr.enabled"]
interface VR : EventTarget {
Promise<sequence<VRDisplay>> getDisplays();
//readonly attribute FrozenArray<VRDisplay> activeVRDisplays;
};

View file

@ -0,0 +1,131 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
enum VREye {
"left",
"right"
};
// https://w3c.github.io/webvr/#interface-vrdisplay
[Pref="dom.webvr.enabled"]
interface VRDisplay : EventTarget {
readonly attribute boolean isConnected;
readonly attribute boolean isPresenting;
/**
* Dictionary of capabilities describing the VRDisplay.
*/
[SameObject] readonly attribute VRDisplayCapabilities capabilities;
/**
* If this VRDisplay supports room-scale experiences, the optional
* stage attribute contains details on the room-scale parameters.
* The stageParameters attribute cannot change between null
* and non-null once the VRDisplay is enumerated; however,
* the values within VRStageParameters may change after
* any call to VRDisplay.submitFrame as the user may re-configure
* their environment at any time.
*/
readonly attribute VRStageParameters? stageParameters;
/**
* Return the current VREyeParameters for the given eye.
*/
VREyeParameters getEyeParameters(VREye whichEye);
/**
* An identifier for this distinct VRDisplay. Used as an
* association point in the Gamepad API.
*/
readonly attribute unsigned long displayId;
/**
* A display name, a user-readable name identifying it.
*/
readonly attribute DOMString displayName;
/**
* Populates the passed VRFrameData with the information required to render
* the current frame.
*/
boolean getFrameData(VRFrameData frameData);
/**
* Return a VRPose containing the future predicted pose of the VRDisplay
* when the current frame will be presented. The value returned will not
* change until JavaScript has returned control to the browser.
*
* The VRPose will contain the position, orientation, velocity,
* and acceleration of each of these properties.
*/
[NewObject] VRPose getPose();
/**
* Reset the pose for this display, treating its current position and
* orientation as the "origin/zero" values. VRPose.position,
* VRPose.orientation, and VRStageParameters.sittingToStandingTransform may be
* updated when calling resetPose(). This should only be called in
* sitting-space experiences.
*/
void resetPose();
/**
* z-depth defining the near plane of the eye view frustum;
* enables mapping of values in the render target depth
* attachment to scene coordinates. Initially set to 0.01.
*/
attribute double depthNear;
/**
* z-depth defining the far plane of the eye view frustum;
* enables mapping of values in the render target depth
* attachment to scene coordinates. Initially set to 10000.0.
*/
attribute double depthFar;
/**
* The callback passed to `requestAnimationFrame` will be called
* any time a new frame should be rendered. When the VRDisplay is
* presenting the callback will be called at the native refresh
* rate of the HMD. When not presenting this function acts
* identically to how window.requestAnimationFrame acts. Content should
* make no assumptions about frame rate or vsync behavior, as the HMD runs
* asynchronously from other displays and at differing refresh rates.
*/
unsigned long requestAnimationFrame(FrameRequestCallback callback);
/**
* Passing the value returned by `requestAnimationFrame` to
* `cancelAnimationFrame` will unregister the callback.
*/
void cancelAnimationFrame(unsigned long handle);
/**
* Begin presenting to the VRDisplay. Must be called in response to a user gesture.
* Repeat calls while already presenting will update the VRLayers being displayed.
* If the number of values in the leftBounds/rightBounds arrays is not 0 or 4 for
* any of the passed layers the promise is rejected.
* If the source of any of the layers is not present (null), the promise is rejected.
*/
Promise<void> requestPresent(sequence<VRLayer> layers);
/**
* Stops presenting to the VRDisplay.
*/
Promise<void> exitPresent();
/**
* Get the layers currently being presented.
*/
//sequence<VRLayer> getLayers();
/**
* The VRLayer provided to the VRDisplay will be captured and presented
* in the HMD. Calling this function has the same effect on the source
* canvas as any other operation that uses its source image, and canvases
* created without preserveDrawingBuffer set to true will be cleared.
*/
void submitFrame();
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrdisplaycapabilities
[Pref="dom.webvr.enabled"]
interface VRDisplayCapabilities {
readonly attribute boolean hasPosition;
readonly attribute boolean hasOrientation;
readonly attribute boolean hasExternalDisplay;
readonly attribute boolean canPresent;
readonly attribute unsigned long maxLayers;
};

View file

@ -0,0 +1,23 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrdisplayevent
enum VRDisplayEventReason {
"navigation",
"mounted",
"unmounted",
"requested"
};
[Pref="dom.webvr.enabled", Constructor(DOMString type, VRDisplayEventInit eventInitDict)]
interface VRDisplayEvent : Event {
readonly attribute VRDisplay display;
readonly attribute VRDisplayEventReason? reason;
};
dictionary VRDisplayEventInit : EventInit {
required VRDisplay display;
VRDisplayEventReason reason;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vreyeparameters
[Pref="dom.webvr.enabled"]
interface VREyeParameters {
readonly attribute Float32Array offset;
[SameObject] readonly attribute VRFieldOfView fieldOfView;
readonly attribute unsigned long renderWidth;
readonly attribute unsigned long renderHeight;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrfieldofview
[Pref="dom.webvr.enabled"]
interface VRFieldOfView {
readonly attribute double upDegrees;
readonly attribute double rightDegrees;
readonly attribute double downDegrees;
readonly attribute double leftDegrees;
};

View file

@ -0,0 +1,15 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrframedata
[Pref="dom.webvr.enabled", Constructor]
interface VRFrameData {
readonly attribute DOMHighResTimeStamp timestamp;
readonly attribute Float32Array leftProjectionMatrix;
readonly attribute Float32Array leftViewMatrix;
readonly attribute Float32Array rightProjectionMatrix;
readonly attribute Float32Array rightViewMatrix;
readonly attribute VRPose pose;
};

View file

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrlayer
//typedef (HTMLCanvasElement or OffscreenCanvas) VRSource;
dictionary VRLayer {
HTMLCanvasElement source;
sequence<float> leftBounds;
sequence<float> rightBounds;
};

View file

@ -0,0 +1,14 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrpose
[Pref="dom.webvr.enabled"]
interface VRPose {
readonly attribute Float32Array? position;
readonly attribute Float32Array? linearVelocity;
readonly attribute Float32Array? linearAcceleration;
readonly attribute Float32Array? orientation;
readonly attribute Float32Array? angularVelocity;
readonly attribute Float32Array? angularAcceleration;
};

View file

@ -0,0 +1,11 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://w3c.github.io/webvr/#interface-vrstageparameters
[Pref="dom.webvr.enabled"]
interface VRStageParameters {
readonly attribute Float32Array sittingToStandingTransform;
readonly attribute float sizeX;
readonly attribute float sizeZ;
};

View file

@ -110,6 +110,7 @@ use timers::{IsInterval, TimerCallback};
use tinyfiledialogs::{self, MessageBoxIcon};
use url::Position;
use webdriver_handlers::jsval_to_webdriver;
use webvr_traits::WebVRMsg;
/// Current state of the window object
#[derive(JSTraceable, Copy, Clone, Debug, PartialEq, HeapSizeOf)]
@ -241,6 +242,10 @@ pub struct Window {
media_query_lists: WeakMediaQueryListVec,
test_runner: MutNullableJS<TestRunner>,
/// A handle for communicating messages to the webvr thread, if available.
#[ignore_heap_size_of = "channels are hard"]
webvr_thread: Option<IpcSender<WebVRMsg>>
}
impl Window {
@ -321,6 +326,10 @@ impl Window {
pub fn current_viewport(&self) -> Rect<Au> {
self.current_viewport.clone().get()
}
pub fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
self.webvr_thread.clone()
}
}
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
@ -1590,7 +1599,8 @@ impl Window {
layout_chan: Sender<Msg>,
id: PipelineId,
parent_info: Option<(PipelineId, FrameType)>,
window_size: Option<WindowSizeData>)
window_size: Option<WindowSizeData>,
webvr_thread: Option<IpcSender<WebVRMsg>>)
-> Root<Window> {
let layout_rpc: Box<LayoutRPC + Send> = {
let (rpc_send, rpc_recv) = channel();
@ -1654,6 +1664,7 @@ impl Window {
scroll_offsets: DOMRefCell::new(HashMap::new()),
media_query_lists: WeakMediaQueryListVec::new(),
test_runner: Default::default(),
webvr_thread: webvr_thread
};
unsafe {

View file

@ -98,6 +98,7 @@ extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
extern crate webvr_traits;
extern crate xml5ever;
mod body;

View file

@ -80,6 +80,7 @@ pub enum ScriptThreadEventCategory {
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM

View file

@ -90,6 +90,7 @@ use script_traits::{ScriptThreadFactory, TimerEvent, TimerEventRequest, TimerSou
use script_traits::{TouchEventType, TouchId, UntrustedNodeAddress, WindowSizeData, WindowSizeType};
use script_traits::CompositorEvent::{KeyEvent, MouseButtonEvent, MouseMoveEvent, ResizeEvent};
use script_traits::CompositorEvent::{TouchEvent, TouchpadPressureEvent};
use script_traits::WebVREventMsg;
use script_traits::webdriver_msg::WebDriverScriptCommand;
use serviceworkerjob::{Job, JobQueue, AsyncJobHandler, FinishJobHandler, InvokeType, SettleType};
use servo_config::opts;
@ -116,6 +117,7 @@ use task_source::user_interaction::{UserInteractionTask, UserInteractionTaskSour
use time::Tm;
use url::Position;
use webdriver_handlers;
use webvr_traits::WebVRMsg;
thread_local!(pub static STACK_ROOTS: Cell<Option<RootCollectionPtr>> = Cell::new(None));
thread_local!(static SCRIPT_THREAD_ROOT: Cell<Option<*const ScriptThread>> = Cell::new(None));
@ -477,6 +479,9 @@ pub struct ScriptThread {
content_process_shutdown_chan: IpcSender<()>,
promise_job_queue: PromiseJobQueue,
/// A handle to the webvr thread, if available
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// In the event of thread panic, all data on the stack runs its destructor. However, there
@ -699,6 +704,8 @@ impl ScriptThread {
promise_job_queue: PromiseJobQueue::new(),
layout_to_constellation_chan: state.layout_to_constellation_chan,
webvr_thread: state.webvr_thread
}
}
@ -945,6 +952,7 @@ impl ScriptThread {
ScriptThreadEventCategory::SetViewport => ProfilerCategory::ScriptSetViewport,
ScriptThreadEventCategory::TimerEvent => ProfilerCategory::ScriptTimerEvent,
ScriptThreadEventCategory::WebSocketEvent => ProfilerCategory::ScriptWebSocketEvent,
ScriptThreadEventCategory::WebVREvent => ProfilerCategory::ScriptWebVREvent,
ScriptThreadEventCategory::WorkerEvent => ProfilerCategory::ScriptWorkerEvent,
ScriptThreadEventCategory::ServiceWorkerEvent => ProfilerCategory::ScriptServiceWorkerEvent,
ScriptThreadEventCategory::EnterFullscreen => ProfilerCategory::ScriptEnterFullscreen,
@ -1009,6 +1017,8 @@ impl ScriptThread {
self.handle_reload(pipeline_id),
ConstellationControlMsg::ExitPipeline(pipeline_id, discard_browsing_context) =>
self.handle_exit_pipeline_msg(pipeline_id, discard_browsing_context),
ConstellationControlMsg::WebVREvent(pipeline_id, event) =>
self.handle_webvr_event(pipeline_id, event),
msg @ ConstellationControlMsg::AttachLayout(..) |
msg @ ConstellationControlMsg::Viewport(..) |
msg @ ConstellationControlMsg::SetScrollState(..) |
@ -1751,7 +1761,8 @@ impl ScriptThread {
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,
incomplete.window_size);
incomplete.window_size,
self.webvr_thread.clone());
let frame_element = frame_element.r().map(Castable::upcast);
let browsing_context = BrowsingContext::new(&window, frame_element);
@ -2212,6 +2223,14 @@ impl ScriptThread {
}
}
fn handle_webvr_event(&self, pipeline_id: PipelineId, event: WebVREventMsg) {
let window = self.documents.borrow().find_window(pipeline_id);
if let Some(window) = window {
let navigator = window.Navigator();
navigator.handle_webvr_event(event);
}
}
pub fn enqueue_promise_job(job: EnqueuedPromiseCallback, global: &GlobalScope) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };

View file

@ -35,3 +35,4 @@ servo_url = {path = "../url", features = ["servo"]}
style_traits = {path = "../style_traits", features = ["servo"]}
time = "0.1.12"
url = {version = "1.2", features = ["heap_size"]}
webvr_traits = {path = "../webvr_traits"}

View file

@ -35,6 +35,7 @@ extern crate serde_derive;
extern crate servo_url;
extern crate style_traits;
extern crate time;
extern crate webvr_traits;
mod script_msg;
pub mod webdriver_msg;
@ -71,6 +72,7 @@ use std::fmt;
use std::sync::mpsc::{Receiver, Sender};
use style_traits::{PagePx, UnsafeNode, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webvr_traits::{WebVRDisplayEvent, WebVRMsg};
pub use script_msg::{LayoutMsg, ScriptMsg, EventResult, LogEntry};
pub use script_msg::{ServiceWorkerMsg, ScopeThings, SWManagerMsg, SWManagerSenders, DOMMessage};
@ -263,6 +265,8 @@ pub enum ConstellationControlMsg {
ReportCSSError(PipelineId, String, usize, usize, String),
/// Reload the given page.
Reload(PipelineId),
/// Notifies the script thread of a WebVR device event
WebVREvent(PipelineId, WebVREventMsg)
}
impl fmt::Debug for ConstellationControlMsg {
@ -295,6 +299,7 @@ impl fmt::Debug for ConstellationControlMsg {
FramedContentChanged(..) => "FramedContentChanged",
ReportCSSError(..) => "ReportCSSError",
Reload(..) => "Reload",
WebVREvent(..) => "WebVREvent",
};
write!(formatter, "ConstellationMsg::{}", variant)
}
@ -478,6 +483,8 @@ pub struct InitialScriptState {
pub pipeline_namespace_id: PipelineNamespaceId,
/// A ping will be sent on this channel once the script thread shuts down.
pub content_process_shutdown_chan: IpcSender<()>,
/// A channel to the webvr thread, if available.
pub webvr_thread: Option<IpcSender<WebVRMsg>>
}
/// This trait allows creating a `ScriptThread` without depending on the `script`
@ -716,6 +723,18 @@ pub enum ConstellationMsg {
Reload,
/// A log entry, with the top-level frame id and thread name
LogEntry(Option<FrameId>, Option<String>, LogEntry),
/// Set the WebVR thread channel.
SetWebVRThread(IpcSender<WebVRMsg>),
/// Dispatch a WebVR event to the subscribed script threads.
WebVREvent(Vec<PipelineId>, WebVREventMsg),
}
/// Messages to the constellation originating from the WebVR thread.
/// Used to dispatch VR headset state events: connected, disconnected, and more.
#[derive(Deserialize, Serialize, Clone)]
pub enum WebVREventMsg {
/// Inform the constellation of a VR display event.
DisplayEvent(WebVRDisplayEvent)
}
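// Illustrative sketch (not part of this commit) of how the constellation could fan a
// ConstellationMsg::WebVREvent out to the script threads of the subscribed pipelines;
// `pipelines` and `event_loop` are hypothetical stand-ins for the constellation's own
// pipeline bookkeeping:
//
//     fn handle_webvr_event(&mut self, ids: Vec<PipelineId>, event: WebVREventMsg) {
//         for id in ids {
//             if let Some(pipeline) = self.pipelines.get(&id) {
//                 // WebVREventMsg derives Clone, so every pipeline gets its own copy.
//                 let msg = ConstellationControlMsg::WebVREvent(id, event.clone());
//                 let _ = pipeline.event_loop.send(msg);
//             }
//         }
//     }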
/// Resources required by workerglobalscopes

View file

@ -54,6 +54,8 @@ servo_config = {path = "../config"}
servo_url = {path = "../url"}
style = {path = "../style", features = ["servo"]}
url = "1.2"
webvr = {path = "../webvr"}
webvr_traits = {path = "../webvr_traits"}
webdriver_server = {path = "../webdriver_server", optional = true}
[dependencies.webrender]

View file

@ -48,6 +48,8 @@ pub extern crate script_layout_interface;
pub extern crate servo_config;
pub extern crate servo_url;
pub extern crate style;
pub extern crate webvr;
pub extern crate webvr_traits;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
@ -96,6 +98,7 @@ use std::cmp::max;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use webvr::{WebVRThread, WebVRCompositorHandler};
pub use gleam::gl;
pub use servo_config as config;
@ -193,6 +196,7 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
debugger_chan,
devtools_chan,
supports_clipboard,
&webrender,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
@ -260,6 +264,7 @@ fn create_constellation(user_agent: Cow<'static, str>,
debugger_chan: Option<debugger::Sender>,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender: &webrender::Renderer,
webrender_api_sender: webrender_traits::RenderApiSender)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothRequest> = BluetoothThreadFactory::new();
@ -295,6 +300,16 @@ fn create_constellation(user_agent: Cow<'static, str>,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
if PREFS.is_webvr_enabled() {
// WebVR initialization
let (mut handler, sender) = WebVRCompositorHandler::new();
let webvr_thread = WebVRThread::spawn(constellation_chan.clone(), sender);
handler.set_webvr_thread_sender(webvr_thread.clone());
webrender.set_vr_compositor_handler(handler);
constellation_chan.send(ConstellationMsg::SetWebVRThread(webvr_thread)).unwrap();
}
if let Some(url) = url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};

View file

@ -0,0 +1,23 @@
[package]
name = "webvr"
version = "0.0.1"
authors = ["The Servo Project Developers"]
license = "MPL-2.0"
publish = false
[lib]
name = "webvr"
path = "lib.rs"
[dependencies]
ipc-channel = "0.5"
log = "0.3"
msg = {path = "../msg"}
script_traits = {path = "../script_traits"}
servo_config = {path = "../config"}
webvr_traits = {path = "../webvr_traits" }
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"
default_features = false
features = ["serde_derive"]

19
components/webvr/lib.rs Normal file
View file

@ -0,0 +1,19 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(custom_derive)]
#![feature(plugin)]
#![deny(unsafe_code)]
extern crate ipc_channel;
#[macro_use]
extern crate log;
extern crate msg;
extern crate script_traits;
extern crate servo_config;
extern crate webrender_traits;
extern crate webvr_traits;
mod webvr_thread;
pub use webvr_thread::{WebVRThread, WebVRCompositorHandler};

View file

@ -0,0 +1,377 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc;
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use msg::constellation_msg::PipelineId;
use script_traits::{ConstellationMsg, WebVREventMsg};
use servo_config::prefs::PREFS;
use std::{thread, time};
use std::collections::{HashMap, HashSet};
use std::sync::mpsc;
use std::sync::mpsc::{Receiver, Sender};
use webrender_traits;
use webvr_traits::{WebVRMsg, WebVRResult};
use webvr_traits::webvr::*;
/// The WebVRThread owns the native VRDisplays, handles their life cycle inside Servo and
/// acts as a doorman for untrusted VR requests from DOM objects. These are the key components:
/// * WebVRThread::spawn() creates a long-lived thread that waits for VR commands from DOM objects
/// and handles them in its trusted thread. The back-and-forth communication with the DOM is implemented
/// using IPC channels. This thread creates the VRServiceManager instance, which handles the life cycle
/// of all VR vendor SDKs and owns all the native VRDisplays. These displays are guaranteed to live while
/// the spawned thread is alive. The WebVRThread is unique; it's closed using the Exit message when the
/// whole browser is going to be closed.
/// * An event-polling thread is created in order to implement WebVR events (connected, disconnected, ...).
/// This thread wakes up the WebVRThread from time to time by sending a PollEvents message. The thread
/// is only created when there is at least one live JavaScript context using the WebVR APIs, and it shuts
/// down when the tab is closed. A single instance of the thread is used to handle multiple JavaScript contexts.
/// The constellation channel is used to notify the script threads of events.
/// * When the WebVR APIs are used in a tab, its pipeline_id is registered using the RegisterContext message. When
/// the tab is closed, the UnregisterContext message is sent. This way the WebVR thread has a list of the pipeline
/// ids using the WebVR APIs. These ids are used to implement the privacy guidelines defined in the WebVR spec.
/// * When a JavaScript thread gains access to present to a headset, the WebVRThread is not used as an intermediary
/// in the VRDisplay.requestAnimationFrame loop, in order to minimize latency. Direct communication with WebRender
/// is used instead. See WebVRCompositorHandler and VRCompositorCommand for more details.
pub struct WebVRThread {
receiver: IpcReceiver<WebVRMsg>,
sender: IpcSender<WebVRMsg>,
service: VRServiceManager,
contexts: HashSet<PipelineId>,
constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender,
polling_events: bool,
presenting: HashMap<u64, PipelineId>
}
impl WebVRThread {
fn new(receiver: IpcReceiver<WebVRMsg>,
sender: IpcSender<WebVRMsg>,
constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender)
-> WebVRThread {
let mut service = VRServiceManager::new();
service.register_defaults();
WebVRThread {
receiver: receiver,
sender: sender,
service: service,
contexts: HashSet::new(),
constellation_chan: constellation_chan,
vr_compositor_chan: vr_compositor_chan,
polling_events: false,
presenting: HashMap::new()
}
}
pub fn spawn(constellation_chan: Sender<ConstellationMsg>,
vr_compositor_chan: WebVRCompositorSender)
-> IpcSender<WebVRMsg> {
let (sender, receiver) = ipc::channel().unwrap();
let sender_clone = sender.clone();
thread::Builder::new().name("WebVRThread".into()).spawn(move || {
WebVRThread::new(receiver, sender_clone, constellation_chan, vr_compositor_chan).start();
}).expect("Thread spawning failed");
sender
}
fn start(&mut self) {
while let Ok(msg) = self.receiver.recv() {
match msg {
WebVRMsg::RegisterContext(context) => {
self.handle_register_context(context);
self.schedule_poll_events();
},
WebVRMsg::UnregisterContext(context) => {
self.handle_unregister_context(context);
},
WebVRMsg::PollEvents(sender) => {
self.poll_events(sender);
},
WebVRMsg::GetDisplays(sender) => {
self.handle_get_displays(sender);
self.schedule_poll_events();
},
WebVRMsg::GetFrameData(pipeline_id, display_id, near, far, sender) => {
self.handle_framedata(pipeline_id, display_id, near, far, sender);
},
WebVRMsg::ResetPose(pipeline_id, display_id, sender) => {
self.handle_reset_pose(pipeline_id, display_id, sender);
},
WebVRMsg::RequestPresent(pipeline_id, display_id, sender) => {
self.handle_request_present(pipeline_id, display_id, sender);
},
WebVRMsg::ExitPresent(pipeline_id, display_id, sender) => {
self.handle_exit_present(pipeline_id, display_id, sender);
},
WebVRMsg::CreateCompositor(display_id) => {
self.handle_create_compositor(display_id);
},
WebVRMsg::Exit => {
break
},
}
}
}
fn handle_register_context(&mut self, ctx: PipelineId) {
self.contexts.insert(ctx);
}
fn handle_unregister_context(&mut self, ctx: PipelineId) {
self.contexts.remove(&ctx);
}
fn handle_get_displays(&mut self, sender: IpcSender<WebVRResult<Vec<VRDisplayData>>>) {
let displays = self.service.get_displays();
let mut result = Vec::new();
for display in displays {
result.push(display.borrow().data());
}
sender.send(Ok(result)).unwrap();
}
fn handle_framedata(&mut self,
pipeline: PipelineId,
display_id: u64,
near: f64,
far: f64,
sender: IpcSender<WebVRResult<VRFrameData>>) {
match self.access_check(pipeline, display_id) {
Ok(display) => {
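                // Note: inmediate_frame_data (sic) is the method name exposed by the rust-webvr crate.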
sender.send(Ok(display.borrow().inmediate_frame_data(near, far))).unwrap()
},
Err(msg) => sender.send(Err(msg.into())).unwrap()
}
}
fn handle_reset_pose(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: IpcSender<WebVRResult<VRDisplayData>>) {
match self.access_check(pipeline, display_id) {
Ok(display) => {
display.borrow_mut().reset_pose();
sender.send(Ok(display.borrow().data())).unwrap();
},
Err(msg) => {
sender.send(Err(msg.into())).unwrap()
}
}
}
// This method implements the privacy and security guidelines defined in the WebVR spec.
// For example a secondary tab is not allowed to read VRDisplay data or stop a VR presentation
// while the user is having a VR experience in the current tab.
// These security rules also avoid multithreading race conditions between the WebVRThread and
// the WebRender thread. See the WebVRCompositorHandler implementation notes for more details.
fn access_check(&self, pipeline: PipelineId, display_id: u64) -> Result<&VRDisplayPtr, &'static str> {
if *self.presenting.get(&display_id).unwrap_or(&pipeline) != pipeline {
            return Err("No access granted to this display because it's presenting in another JavaScript tab");
}
self.service.get_display(display_id).ok_or("Device not found")
}
fn handle_request_present(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: IpcSender<WebVRResult<()>>) {
match self.access_check(pipeline, display_id).map(|d| d.clone()) {
Ok(display) => {
self.presenting.insert(display_id, pipeline);
let data = display.borrow().data();
sender.send(Ok(())).unwrap();
self.notify_event(VRDisplayEvent::PresentChange(data, true));
},
Err(msg) => {
sender.send(Err(msg.into())).unwrap();
}
}
}
fn handle_exit_present(&mut self,
pipeline: PipelineId,
display_id: u64,
sender: Option<IpcSender<WebVRResult<()>>>) {
match self.access_check(pipeline, display_id).map(|d| d.clone()) {
Ok(display) => {
self.presenting.remove(&display_id);
if let Some(sender) = sender {
sender.send(Ok(())).unwrap();
}
let data = display.borrow().data();
self.notify_event(VRDisplayEvent::PresentChange(data, false));
},
Err(msg) => {
if let Some(sender) = sender {
sender.send(Err(msg.into())).unwrap();
}
}
}
}
fn handle_create_compositor(&mut self, display_id: u64) {
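        // Hand WebRender a raw pointer to the native VRDisplay; see the safety notes
        // on WebVRCompositorHandler below.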
let compositor = self.service.get_display(display_id).map(|d| WebVRCompositor(d.as_ptr()));
self.vr_compositor_chan.send(compositor).unwrap();
}
fn poll_events(&mut self, sender: IpcSender<bool>) {
loop {
let events = self.service.poll_events();
if events.is_empty() {
break;
}
self.notify_events(events)
}
        // Keep polling for events only while some JavaScript context is still using WebVR.
        self.polling_events = !self.contexts.is_empty();
sender.send(self.polling_events).unwrap();
}
fn notify_events(&self, events: Vec<VRDisplayEvent>) {
        let pipeline_ids: Vec<PipelineId> = self.contexts.iter().cloned().collect();
for event in events {
let event = WebVREventMsg::DisplayEvent(event);
self.constellation_chan.send(ConstellationMsg::WebVREvent(pipeline_ids.clone(), event)).unwrap();
}
}
#[inline]
fn notify_event(&self, event: VRDisplayEvent) {
self.notify_events(vec![event]);
}
fn schedule_poll_events(&mut self) {
if !self.service.is_initialized() || self.polling_events {
return;
}
self.polling_events = true;
let webvr_thread = self.sender.clone();
let (sender, receiver) = ipc::channel().unwrap();
        // Polling interval, in milliseconds, for VR events such as VRDisplay connected/disconnected.
let polling_interval: u64 = PREFS.get("dom.webvr.event_polling_interval").as_u64().unwrap_or(500);
thread::Builder::new().name("WebVRPollEvents".into()).spawn(move || {
loop {
if webvr_thread.send(WebVRMsg::PollEvents(sender.clone())).is_err() {
// WebVR Thread closed
break;
}
if !receiver.recv().unwrap_or(false) {
// WebVR Thread asked to unschedule this thread
break;
}
thread::sleep(time::Duration::from_millis(polling_interval));
}
}).expect("Thread spawning failed");
}
}
/// Notes about the WebVRCompositorHandler implementation:
/// Raw pointers are used instead of Arc<Mutex> as an aggressive optimization for latency reasons.
/// This also avoids "JS DDoS" attacks, such as a secondary JavaScript tab degrading performance by
/// flooding the WebVRThread with messages while the main JavaScript tab is presenting to the headset.
/// Multithreading won't be a problem because:
/// * Thanks to the security rules implemented in the WebVRThread, when a VRDisplay is in a presenting loop
/// no other JSContext is granted access to the VRDisplay, so there really aren't multithreading race conditions.
/// * VRDisplay implementations are designed to allow calling compositor functions
/// in another thread by using the Send + Sync traits.
/// VRDisplay pointers are guaranteed to point to valid memory:
/// * VRDisplays are owned by the VRServiceManager, which lives in the WebVRThread.
/// * The WebVRCompositorHandler is stopped automatically when a JS tab is closed or the whole browser is closed.
/// * The WebVRThread and its VRDisplays are destroyed only after all tabs are dropped, when the browser is about
/// to exit; the WebVRThread is closed using the Exit message.
pub struct WebVRCompositor(*mut VRDisplay);
pub struct WebVRCompositorHandler {
compositors: HashMap<webrender_traits::VRCompositorId, WebVRCompositor>,
webvr_thread_receiver: Receiver<Option<WebVRCompositor>>,
webvr_thread_sender: Option<IpcSender<WebVRMsg>>
}
#[allow(unsafe_code)]
unsafe impl Send for WebVRCompositor {}
pub type WebVRCompositorSender = Sender<Option<WebVRCompositor>>;
impl WebVRCompositorHandler {
pub fn new() -> (Box<WebVRCompositorHandler>, WebVRCompositorSender) {
let (sender, receiver) = mpsc::channel();
let instance = Box::new(WebVRCompositorHandler {
compositors: HashMap::new(),
webvr_thread_receiver: receiver,
webvr_thread_sender: None
});
(instance, sender)
}
}
impl webrender_traits::VRCompositorHandler for WebVRCompositorHandler {
#[allow(unsafe_code)]
fn handle(&mut self, cmd: webrender_traits::VRCompositorCommand, texture_id: Option<u32>) {
match cmd {
webrender_traits::VRCompositorCommand::Create(compositor_id) => {
self.create_compositor(compositor_id);
}
webrender_traits::VRCompositorCommand::SyncPoses(compositor_id, near, far, sender) => {
if let Some(compositor) = self.compositors.get(&compositor_id) {
let pose = unsafe {
(*compositor.0).sync_poses();
(*compositor.0).synced_frame_data(near, far).to_bytes()
};
let _ = sender.send(Ok(pose));
} else {
let _ = sender.send(Err(()));
}
}
webrender_traits::VRCompositorCommand::SubmitFrame(compositor_id, left_bounds, right_bounds) => {
if let Some(compositor) = self.compositors.get(&compositor_id) {
if let Some(texture_id) = texture_id {
let layer = VRLayer {
texture_id: texture_id,
left_bounds: left_bounds,
right_bounds: right_bounds
};
unsafe {
(*compositor.0).submit_frame(&layer);
}
}
}
}
webrender_traits::VRCompositorCommand::Release(compositor_id) => {
self.compositors.remove(&compositor_id);
}
}
}
}
impl WebVRCompositorHandler {
#[allow(unsafe_code)]
fn create_compositor(&mut self, display_id: webrender_traits::VRCompositorId) {
let sender = match self.webvr_thread_sender {
Some(ref s) => s,
None => return,
};
sender.send(WebVRMsg::CreateCompositor(display_id)).unwrap();
let display = self.webvr_thread_receiver.recv().unwrap();
match display {
Some(display) => {
self.compositors.insert(display_id, display);
},
None => {
error!("VRDisplay not found when creating a new VRCompositor");
}
};
}
    // This is done only once per platform port, during initialization.
pub fn set_webvr_thread_sender(&mut self, sender: IpcSender<WebVRMsg>) {
self.webvr_thread_sender = Some(sender);
}
}

View file

@ -0,0 +1,17 @@
[package]
name = "webvr_traits"
version = "0.0.1"
authors = ["The Servo Project Developers"]
license = "MPL-2.0"
publish = false
[lib]
name = "webvr_traits"
path = "lib.rs"
[dependencies]
ipc-channel = "0.5"
msg = {path = "../msg"}
serde = "0.8"
serde_derive = "0.8"
rust-webvr = {version = "0.1", features = ["serde-serialization"]}

View file

@ -0,0 +1,29 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(custom_derive)]
#![feature(plugin)]
#![deny(unsafe_code)]
extern crate ipc_channel;
extern crate msg;
extern crate serde;
#[macro_use]
extern crate serde_derive;
pub extern crate rust_webvr as webvr;
mod webvr_traits;
pub use webvr::VRDisplayData as WebVRDisplayData;
pub use webvr::VRDisplayCapabilities as WebVRDisplayCapabilities;
pub use webvr::VRDisplayEvent as WebVRDisplayEvent;
pub use webvr::VRDisplayEventReason as WebVRDisplayEventReason;
pub use webvr::VREye as WebVREye;
pub use webvr::VREyeParameters as WebVREyeParameters;
pub use webvr::VRFieldOfView as WebVRFieldOfView;
pub use webvr::VRFrameData as WebVRFrameData;
pub use webvr::VRLayer as WebVRLayer;
pub use webvr::VRPose as WebVRPose;
pub use webvr::VRStageParameters as WebVRStageParameters;
pub use webvr_traits::{WebVRMsg, WebVRResult};

View file

@ -0,0 +1,24 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use webvr::*;
pub type WebVRResult<T> = Result<T, String>;
// Messages from the script thread to the WebVR thread.
#[derive(Deserialize, Serialize)]
pub enum WebVRMsg {
RegisterContext(PipelineId),
UnregisterContext(PipelineId),
PollEvents(IpcSender<bool>),
GetDisplays(IpcSender<WebVRResult<Vec<VRDisplayData>>>),
GetFrameData(PipelineId, u64, f64, f64, IpcSender<WebVRResult<VRFrameData>>),
ResetPose(PipelineId, u64, IpcSender<WebVRResult<VRDisplayData>>),
RequestPresent(PipelineId, u64, IpcSender<WebVRResult<()>>),
ExitPresent(PipelineId, u64, Option<IpcSender<WebVRResult<()>>>),
CreateCompositor(u64),
Exit,
}
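// Example (illustrative only, not part of this commit): a caller such as the script
// thread holds an IpcSender<WebVRMsg>, creates a one-shot IPC channel, embeds the
// reply sender in the request, and blocks on the response:
//
//     use ipc_channel::ipc;
//
//     fn get_displays(webvr_thread: &IpcSender<WebVRMsg>)
//                     -> WebVRResult<Vec<VRDisplayData>> {
//         let (sender, receiver) = ipc::channel().expect("failed to create ipc channel");
//         webvr_thread.send(WebVRMsg::GetDisplays(sender)).expect("WebVR thread unavailable");
//         receiver.recv().expect("WebVR thread dropped the response channel")
//     }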

View file

@ -7,6 +7,8 @@
"dom.serviceworker.timeout_seconds": 60,
"dom.testable_crash.enabled": false,
"dom.testbinding.enabled": false,
"dom.webvr.enabled": false,
"dom.webvr.event_polling_interval": 500,
"js.asmjs.enabled": true,
"js.asyncstack.enabled": false,
"js.baseline.enabled": true,

View file

@ -0,0 +1,320 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>07 - Advanced Mirroring</title>
<!--
This sample demonstrates how to display a different view of the scene on
an external monitor than is being viewed through the headset.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
    <!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
        // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-island.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var PLAYER_HEIGHT = 1.65;
var vrDisplay = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var poseMat = mat4.create();
var tmpMat = mat4.create();
var vrPresentButton = null;
var orientation = [0, 0, 0, 1];
var position = [0, 0, 0];
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeIsland = null;
var stats = null;
var debugGeom = null;
function initWebGL () {
var glAttribs = {
alpha: false,
antialias: false //!VRSamplesUtil.isMobile()
          // When doing mirroring like this, do NOT turn on preserveDrawingBuffer!
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
// Using cubeIsland for this sample because it's easier to see from a
// third person view.
cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
stats = new WGLUStats(gl);
debugGeom = new WGLUDebugGeometry(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
var frameData;
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
initWebGL();
if (vrDisplay.stageParameters &&
vrDisplay.stageParameters.sizeX > 0 &&
vrDisplay.stageParameters.sizeZ > 0) {
cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
}
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
} else {
initWebGL();
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL();
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL();
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
function getStandingViewMatrix (out, view) {
if (vrDisplay.stageParameters) {
mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
mat4.multiply(out, view, out);
} else {
mat4.identity(out);
mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
mat4.invert(out, out);
mat4.multiply(out, view, out);
}
}
function getPoseMatrix (out, pose) {
orientation = pose.orientation;
position = pose.position;
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!position) { position = [0, 0, 0]; }
mat4.fromRotationTranslation(tmpMat, orientation, position);
mat4.invert(tmpMat, tmpMat);
getStandingViewMatrix(out, tmpMat);
mat4.invert(out, out);
}
function renderSceneThirdPersonView (pose) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
// Set up the camera in the back left corner of the island
mat4.identity(viewMat);
mat4.translate(viewMat, viewMat, [-2, 2.5, 2]);
mat4.rotateY(viewMat, viewMat, Math.PI * -0.25);
mat4.rotateX(viewMat, viewMat, Math.PI * -0.15);
mat4.invert(viewMat, viewMat);
cubeIsland.render(projectionMat, viewMat, stats);
// Render a debug view of the headset's position
if (pose) {
getPoseMatrix(poseMat, pose);
mat4.getTranslation(position, poseMat);
mat4.getRotation(orientation, poseMat);
debugGeom.bind(projectionMat, viewMat);
debugGeom.drawCube(orientation, position, 0.2, [0, 1, 0, 1]);
}
stats.renderOrtho();
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
          if (vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
cubeIsland.render(frameData.leftProjectionMatrix, viewMat, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
cubeIsland.render(frameData.rightProjectionMatrix, viewMat, stats);
// VRDisplay.submitFrame
vrDisplay.submitFrame();
// If we have an external display we can render a different version
// of the scene entirely after calling submitFrame and it will be
// shown on the page. Depending on the content this can be expensive
// so this technique should only be used when it will not interfere
// with the performance of the VR rendering.
if (vrDisplay.capabilities.hasExternalDisplay) {
renderSceneThirdPersonView(frameData.pose);
}
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
cubeIsland.render(projectionMat, viewMat, stats);
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
renderSceneThirdPersonView(null);
}
stats.end();
}
})();
</script>
</body>
</html>

View file

@ -0,0 +1,312 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>08 - Dynamic Resolution</title>
<!--
This sample demonstrates how to efficiently adjust the resolution of your
WebVR scene on the fly using the layer bounds. Based off sample 4b.
-->
<style>
body {
background-color: black;
}
#canvas-clip, #webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#canvas-clip.presenting {
overflow: hidden;
bottom: 0;
right: 0;
margin: auto;
}
</style>
    <!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
        // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<div id="canvas-clip">
<canvas id="webgl-canvas"></canvas>
</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var canvasClip = document.getElementById("canvas-clip");
var gl = null;
var cubeSea = null;
var stats = null;
function initWebGL (preserveDrawingBuffer) {
var glAttribs = {
alpha: false,
antialias: false, //!VRSamplesUtil.isMobile(),
preserveDrawingBuffer: false //preserveDrawingBuffer
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
stats = new WGLUStats(gl);
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
resolutionMultiplier = 1.0;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
canvasClip.classList.add("presenting");
var leftEye = vrDisplay.getEyeParameters("left");
canvasClip.style.width = (leftEye.renderWidth/2) + "px";
canvasClip.style.height = (leftEye.renderHeight/2) + "px";
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
canvasClip.classList.remove("presenting");
canvasClip.style.width = "";
canvasClip.style.height = "";
webglCanvas.style.width = "";
webglCanvas.style.height = "";
}
}
// Make sure the canvas is resized AFTER we've updated the container div.
onResize();
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
initWebGL(vrDisplay.capabilities.hasExternalDisplay);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
// How large our frame should be in relation to the recommended render
// target size.
var resolutionMultiplier = 1.0;
var eyeWidth, eyeHeight;
var lastAdjustment = 0;
function adjustResolution(t) {
// Update the resolution at most every 100 milliseconds
if (t - lastAdjustment < 100)
return;
lastAdjustment = t;
// Modify the resolution we are rendering at over time on a sin wave.
// In the real world this would probably be based on scene complexity.
// Oscillates between 0.5 and 1.0.
resolutionMultiplier = (Math.sin(t / 1000) * 0.25) + 0.75;
eyeWidth = webglCanvas.width * 0.5 * resolutionMultiplier;
eyeHeight = webglCanvas.height * resolutionMultiplier;
// Layer bounds are described in UV space, so 0.0 to 1.0
var boundsWidth = 0.5 * resolutionMultiplier;
var boundsHeight = resolutionMultiplier;
// Tell the presenting display about the new texture bounds. This
// ensures it only picks up the parts of the texture we're going to be
// rendering to and avoids the need to resize the WebGL canvas, which
// can be a slow operation. Because we're already presenting when we
// call requestPresent again it only updates the VRLayer information and
// doesn't require a user gesture.
vrDisplay.requestPresent([{
source: webglCanvas,
leftBounds: [0.0, 0.0, boundsWidth, boundsHeight],
rightBounds: [boundsWidth, 0.0, boundsWidth, boundsHeight],
}]);
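// Worked example: with resolutionMultiplier at 0.75 the bounds above are
// leftBounds [0.0, 0.0, 0.375, 0.75] and rightBounds [0.375, 0.0, 0.375, 0.75];
// each entry is [x, y, width, height] in UV space, so each eye samples a
// 37.5%-wide, 75%-tall region of the canvas.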
// To ensure our mirrored content also shows up correctly we'll scale
// the canvas display size to be scaled appropriately such that it
// continues to only show one eye.
webglCanvas.style.width = (1.0/resolutionMultiplier) * 200 + "%";
webglCanvas.style.height = (1.0/resolutionMultiplier) * 100 + "%";
//webglCanvas.style.marginTop = ((eyeHeight - webglCanvas.height)* resolutionMultiplier) + "px";
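// Example: at resolutionMultiplier 0.5 the canvas is styled to 400% x 200%
// of the clip div, so the half-resolution left-eye region still fills the
// visible mirror area.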
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
adjustResolution(t);
// Note that the viewports use the eyeWidth/height rather than the
// canvas width and height.
gl.viewport(0, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(eyeWidth, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,270 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
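// A minimal usage sketch (not part of the original file; assumes a WebGL
// context `gl` and that glMatrix's mat4 is loaded, as elsewhere in these
// samples):
//   var debugGeom = new WGLUDebugGeometry(gl);
//   debugGeom.bind(projectionMat, viewMat);
//   debugGeom.drawCube(null, [0, 1, -2], 0.5, [0, 1, 0, 1]);  // green cube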
var WGLUDebugGeometry = (function() {
"use strict";
var debugGeomVS = [
"uniform mat4 projectionMat;",
"uniform mat4 viewMat;",
"uniform mat4 modelMat;",
"attribute vec3 position;",
"void main() {",
" gl_Position = projectionMat * viewMat * modelMat * vec4( position, 1.0 );",
"}",
].join("\n");
var debugGeomFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var DebugGeometry = function(gl) {
this.gl = gl;
this.projMat = mat4.create();
this.viewMat = mat4.create();
this.modelMat = mat4.create();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(debugGeomVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(debugGeomFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({ position: 0 });
this.program.link();
var verts = [];
var indices = [];
//
// Cube Geometry
//
this.cubeIndexOffset = indices.length;
var size = 0.5;
// Bottom
var idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, -size, +size);
verts.push(-size, -size, +size);
// Top
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, +size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
// Left
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(-size, +size, -size);
verts.push(-size, +size, +size);
verts.push(-size, -size, +size);
// Right
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(+size, -size, +size);
// Back
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(-size, +size, -size);
// Front
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, +size);
verts.push(+size, -size, +size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
this.cubeIndexCount = indices.length - this.cubeIndexOffset;
//
// Cone Geometry
//
this.coneIndexOffset = indices.length;
size = 0.5;
var conePointVertex = verts.length / 3.0;
var coneBaseVertex = conePointVertex+1;
var coneSegments = 16;
// Point
verts.push(0, size, 0);
// Base Vertices
for (var i = 0; i < coneSegments; ++i) {
if (i > 0) {
idx = verts.length / 3.0;
indices.push(idx-1, conePointVertex, idx);
}
var rad = ((Math.PI * 2) / coneSegments) * i;
verts.push(Math.sin(rad) * (size / 2.0), -size, Math.cos(rad) * (size / 2.0));
}
// Last triangle to fill the gap
indices.push(idx, conePointVertex, coneBaseVertex);
// Base triangles
for (var i = 2; i < coneSegments; ++i) {
indices.push(coneBaseVertex, coneBaseVertex+(i-1), coneBaseVertex+i);
}
this.coneIndexCount = indices.length - this.coneIndexOffset;
//
// Rect geometry
//
this.rectIndexOffset = indices.length;
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2, idx+3, idx);
verts.push(0, 0, 0);
verts.push(1, 0, 0);
verts.push(1, 1, 0);
verts.push(0, 1, 0);
this.rectIndexCount = indices.length - this.rectIndexOffset;
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
DebugGeometry.prototype.bind = function(projectionMat, viewMat) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.viewMat, false, viewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 12, 0);
};
DebugGeometry.prototype.bindOrtho = function() {
mat4.ortho(this.projMat, 0, this.gl.canvas.width, this.gl.canvas.height, 0, 0.1, 1024);
mat4.identity(this.viewMat);
this.bind(this.projMat, this.viewMat);
};
DebugGeometry.prototype._bindUniforms = function(orientation, position, scale, color) {
if (!position) { position = [0, 0, 0]; }
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!scale) { scale = [1, 1, 1]; }
if (!color) { color = [1, 0, 0, 1]; }
mat4.fromRotationTranslationScale(this.modelMat, orientation, position, scale);
this.gl.uniformMatrix4fv(this.program.uniform.modelMat, false, this.modelMat);
this.gl.uniform4fv(this.program.uniform.color, color);
};
DebugGeometry.prototype.drawCube = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBox = function(orientation, position, scale, color) {
var gl = this.gl;
this._bindUniforms(orientation, position, scale, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBoxWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawRect = function(x, y, width, height, color) {
var gl = this.gl;
this._bindUniforms(null, [x, y, -1], [width, height, 1], color);
gl.drawElements(gl.LINE_STRIP, this.rectIndexCount, gl.UNSIGNED_SHORT, this.rectIndexOffset * 2.0);
};
DebugGeometry.prototype.drawCone = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
DebugGeometry.prototype.drawConeWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
return DebugGeometry;
})();


@@ -0,0 +1,162 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Caches specified GL state, runs a callback, and restores the cached state when
done.
Example usage:
var savedState = [
gl.ARRAY_BUFFER_BINDING,
// TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
gl.CLEAR_COLOR,
];
// After this call the array buffer, texture unit 0, active texture, and clear
// color will be restored. The viewport will remain changed, however, because
// gl.VIEWPORT was not included in the savedState list.
WGLUPreserveGLState(gl, savedState, function(gl) {
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, ....);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, ...);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
});
Note that this is not intended to be fast. Managing state in your own code to
avoid redundant state setting and querying will always be faster. This function
is most useful for cases where you may not have full control over the WebGL
calls being made, such as tooling or effect injectors.
*/
function WGLUPreserveGLState(gl, bindings, callback) {
if (!bindings) {
callback(gl);
return;
}
var boundValues = [];
var activeTexture = null;
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
switch (binding) {
case gl.TEXTURE_BINDING_2D:
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
boundValues.push(null, null);
break;
}
if (!activeTexture) {
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
}
gl.activeTexture(textureUnit);
boundValues.push(gl.getParameter(binding), null);
break;
case gl.ACTIVE_TEXTURE:
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
boundValues.push(null);
break;
default:
boundValues.push(gl.getParameter(binding));
break;
}
}
callback(gl);
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
var boundValue = boundValues[i];
switch (binding) {
case gl.ACTIVE_TEXTURE:
break; // Ignore this binding, since we special-case it to happen last.
case gl.ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
break;
case gl.COLOR_CLEAR_VALUE:
gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.COLOR_WRITEMASK:
gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.CURRENT_PROGRAM:
gl.useProgram(boundValue);
break;
case gl.ELEMENT_ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
break;
case gl.FRAMEBUFFER_BINDING:
gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
break;
case gl.RENDERBUFFER_BINDING:
gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
break;
case gl.TEXTURE_BINDING_2D:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, boundValue);
break;
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
break;
case gl.VIEWPORT:
gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.BLEND:
case gl.CULL_FACE:
case gl.DEPTH_TEST:
case gl.SCISSOR_TEST:
case gl.STENCIL_TEST:
if (boundValue) {
gl.enable(binding);
} else {
gl.disable(binding);
}
break;
default:
console.log("No GL restore behavior for 0x" + binding.toString(16));
break;
}
if (activeTexture) {
gl.activeTexture(activeTexture);
}
}
}


@@ -0,0 +1,179 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Utility class to make loading shader programs easier. Does all the error
checking you typically want, automatically queries uniform and attribute
locations, and attempts to take advantage of some browsers' ability to link
asynchronously by not querying any information from the program until its
first use.
*/
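// A minimal usage sketch (not part of the original file; assumes a WebGL
// context `gl` and vertex/fragment sources `vsSource`/`fsSource`):
//   var program = new WGLUProgram(gl);
//   program.attachShaderSource(vsSource, gl.VERTEX_SHADER);
//   program.attachShaderSource(fsSource, gl.FRAGMENT_SHADER);
//   program.bindAttribLocation({ position: 0 });
//   program.link();  // does not block; errors are reported on first use()
//   program.use();   // first call checks link status and queries locations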
var WGLUProgram = (function() {
"use strict";
// Attempts to allow the browser to asynchronously compile and link
var Program = function(gl) {
this.gl = gl;
this.program = gl.createProgram();
this.attrib = null;
this.uniform = null;
this._firstUse = true;
this._vertexShader = null;
this._fragmentShader = null;
}
Program.prototype.attachShaderSource = function(source, type) {
var gl = this.gl;
var shader;
switch (type) {
case gl.VERTEX_SHADER:
this._vertexShader = gl.createShader(type);
shader = this._vertexShader;
break;
case gl.FRAGMENT_SHADER:
this._fragmentShader = gl.createShader(type);
shader = this._fragmentShader;
break;
default:
console.error("Invalid Shader Type:", type);
return;
}
gl.attachShader(this.program, shader);
gl.shaderSource(shader, source);
gl.compileShader(shader);
}
Program.prototype.attachShaderSourceFromXHR = function(url, type) {
var self = this;
return new Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.addEventListener("load", function (ev) {
if (xhr.status == 200) {
self.attachShaderSource(xhr.response, type);
resolve();
} else {
reject(xhr.statusText);
}
}, false);
xhr.open("GET", url, true);
xhr.send(null);
});
}
Program.prototype.attachShaderSourceFromTag = function(tagId, type) {
var shaderTag = document.getElementById(tagId);
if (!shaderTag) {
console.error("Shader source tag not found:", tagId);
return;
}
if (!type) {
if (shaderTag.type == "x-shader/x-vertex") {
type = this.gl.VERTEX_SHADER;
} else if (shaderTag.type == "x-shader/x-fragment") {
type = this.gl.FRAGMENT_SHADER;
} else {
console.error("Invalid Shader Type:", shaderTag.type);
return;
}
}
var src = "";
var k = shaderTag.firstChild;
while (k) {
if (k.nodeType == 3) {
src += k.textContent;
}
k = k.nextSibling;
}
this.attachShaderSource(src, type);
}
Program.prototype.bindAttribLocation = function(attribLocationMap) {
var gl = this.gl;
if (attribLocationMap) {
this.attrib = {};
for (var attribName in attribLocationMap) {
gl.bindAttribLocation(this.program, attribLocationMap[attribName], attribName);
this.attrib[attribName] = attribLocationMap[attribName];
}
}
}
Program.prototype.transformFeedbackVaryings = function(varyings, type) {
// Requires a WebGL 2 context; WebGL 1 has no transform feedback.
this.gl.transformFeedbackVaryings(this.program, varyings, type);
}
Program.prototype.link = function() {
this.gl.linkProgram(this.program);
}
Program.prototype.use = function() {
var gl = this.gl;
// If this is the first time the program has been used do all the error checking and
// attrib/uniform querying needed.
if (this._firstUse) {
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
if (this._vertexShader && !gl.getShaderParameter(this._vertexShader, gl.COMPILE_STATUS)) {
console.error("Vertex shader compile error:", gl.getShaderInfoLog(this._vertexShader));
} else if (this._fragmentShader && !gl.getShaderParameter(this._fragmentShader, gl.COMPILE_STATUS)) {
console.error("Fragment shader compile error:", gl.getShaderInfoLog(this._fragmentShader));
} else {
console.error("Program link error:", gl.getProgramInfoLog(this.program));
}
gl.deleteProgram(this.program);
this.program = null;
} else {
if (!this.attrib) {
this.attrib = {};
var attribCount = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
for (var i = 0; i < attribCount; i++) {
var attribInfo = gl.getActiveAttrib(this.program, i);
this.attrib[attribInfo.name] = gl.getAttribLocation(this.program, attribInfo.name);
}
}
this.uniform = {};
var uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(this.program, i);
uniformName = uniformInfo.name.replace("[0]", "");
this.uniform[uniformName] = gl.getUniformLocation(this.program, uniformName);
}
}
gl.deleteShader(this._vertexShader);
gl.deleteShader(this._fragmentShader);
this._firstUse = false;
}
gl.useProgram(this.program);
}
return Program;
})();


@@ -0,0 +1,649 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Heavily inspired by Mr. Doob's stats.js, this FPS counter is rendered completely
with WebGL, allowing it to be shown in cases where overlaid HTML elements aren't
usable (like WebVR), or if you want the FPS counter to be rendered as part of
your scene.
See stats-test.html for basic usage.
*/
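// A minimal usage sketch (not part of the original file; mirrors how the
// sample page above drives the counter each frame):
//   var stats = new WGLUStats(gl);
//   function onAnimationFrame(t) {
//     stats.begin();
//     // ... render the scene ...
//     stats.renderOrtho();  // or stats.render(projMat, modelViewMat) in VR
//     stats.end();
//     window.requestAnimationFrame(onAnimationFrame);
//   }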
var WGLUStats = (function() {
"use strict";
//--------------------
// glMatrix functions
//--------------------
// These functions have been copied here from glMatrix (glmatrix.net) to allow
// this file to run standalone.
var mat4_identity = function(out) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
var mat4_multiply = function (out, a, b) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
// Cache only the current line of the second matrix
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
return out;
};
var mat4_fromTranslation = function(out, v) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
var mat4_ortho = function (out, left, right, bottom, top, near, far) {
var lr = 1 / (left - right),
bt = 1 / (bottom - top),
nf = 1 / (near - far);
out[0] = -2 * lr;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = -2 * bt;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 2 * nf;
out[11] = 0;
out[12] = (left + right) * lr;
out[13] = (top + bottom) * bt;
out[14] = (far + near) * nf;
out[15] = 1;
return out;
};
var mat4_translate = function (out, a, v) {
var x = v[0], y = v[1], z = v[2],
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
if (a === out) {
out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
} else {
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
out[12] = a00 * x + a10 * y + a20 * z + a[12];
out[13] = a01 * x + a11 * y + a21 * z + a[13];
out[14] = a02 * x + a12 * y + a22 * z + a[14];
out[15] = a03 * x + a13 * y + a23 * z + a[15];
}
return out;
};
var mat4_scale = function(out, a, v) {
var x = v[0], y = v[1], z = v[2];
out[0] = a[0] * x;
out[1] = a[1] * x;
out[2] = a[2] * x;
out[3] = a[3] * x;
out[4] = a[4] * y;
out[5] = a[5] * y;
out[6] = a[6] * y;
out[7] = a[7] * y;
out[8] = a[8] * z;
out[9] = a[9] * z;
out[10] = a[10] * z;
out[11] = a[11] * z;
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
//-------------------
// Utility functions
//-------------------
function linkProgram(gl, vertexSource, fragmentSource, attribLocationMap) {
// No error checking for brevity.
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexSource);
gl.compileShader(vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentSource);
gl.compileShader(fragmentShader);
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
for (var attribName in attribLocationMap)
gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
gl.linkProgram(program);
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
return program;
}
function getProgramUniforms(gl, program) {
var uniforms = {};
var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(program, i);
uniformName = uniformInfo.name.replace("[0]", "");
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
return uniforms;
}
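// Note: WebGL reports array uniforms with an "[0]" suffix (e.g. "lights[0]"
// for a hypothetical "uniform vec3 lights[4]"); stripping it stores each
// location under the plain base name.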
//----------------------------
// Seven-segment text display
//----------------------------
var sevenSegmentVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec2 position;",
"void main() {",
" gl_Position = projectionMat * modelViewMat * vec4( position, 0.0, 1.0 );",
"}",
].join("\n");
var sevenSegmentFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var SevenSegmentText = function (gl) {
this.gl = gl;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, sevenSegmentVS, sevenSegmentFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var verts = [];
var segmentIndices = {};
var indices = [];
var width = 0.5;
var thickness = 0.25;
this.kerning = 2.0;
this.matrix = new Float32Array(16);
function defineSegment(id, left, top, right, bottom) {
var idx = verts.length / 2;
verts.push(
left, top,
right, top,
right, bottom,
left, bottom);
segmentIndices[id] = [
idx, idx+2, idx+1,
idx, idx+3, idx+2];
}
var characters = {};
this.characters = characters;
function defineCharacter(c, segments) {
var character = {
character: c,
offset: indices.length * 2,
count: 0
};
for (var i = 0; i < segments.length; ++i) {
var idx = segments[i];
var segment = segmentIndices[idx];
character.count += segment.length;
indices.push.apply(indices, segment);
}
characters[c] = character;
}
/* Segment layout is as follows:
|-0-|
3 4
|-1-|
5 6
|-2-|
*/
defineSegment(0, -1, 1, width, 1-thickness);
defineSegment(1, -1, thickness*0.5, width, -thickness*0.5);
defineSegment(2, -1, -1+thickness, width, -1);
defineSegment(3, -1, 1, -1+thickness, -thickness*0.5);
defineSegment(4, width-thickness, 1, width, -thickness*0.5);
defineSegment(5, -1, thickness*0.5, -1+thickness, -1);
defineSegment(6, width-thickness, thickness*0.5, width, -1);
defineCharacter("0", [0, 2, 3, 4, 5, 6]);
defineCharacter("1", [4, 6]);
defineCharacter("2", [0, 1, 2, 4, 5]);
defineCharacter("3", [0, 1, 2, 4, 6]);
defineCharacter("4", [1, 3, 4, 6]);
defineCharacter("5", [0, 1, 2, 3, 6]);
defineCharacter("6", [0, 1, 2, 3, 5, 6]);
defineCharacter("7", [0, 4, 6]);
defineCharacter("8", [0, 1, 2, 3, 4, 5, 6]);
defineCharacter("9", [0, 1, 2, 3, 4, 6]);
defineCharacter("A", [0, 1, 3, 4, 5, 6]);
defineCharacter("B", [1, 2, 3, 5, 6]);
defineCharacter("C", [0, 2, 3, 5]);
defineCharacter("D", [1, 2, 4, 5, 6]);
defineCharacter("E", [0, 1, 2, 4, 6]);
defineCharacter("F", [0, 1, 3, 5]);
defineCharacter("P", [0, 1, 3, 4, 5]);
defineCharacter("-", [1]);
defineCharacter(" ", []);
defineCharacter("_", [2]); // Used for undefined characters
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.DYNAMIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
SevenSegmentText.prototype.render = function(projectionMat, modelViewMat, text, r, g, b, a) {
var gl = this.gl;
if (r == undefined || g == undefined || b == undefined) {
r = 0.0;
g = 1.0;
b = 0.0;
}
if (a == undefined)
a = 1.0;
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniform4f(this.uniforms.color, r, g, b, a);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(this.attribs.position);
gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
text = text.toUpperCase();
var offset = 0;
for (var i = 0; i < text.length; ++i) {
var c;
if (text[i] in this.characters) {
c = this.characters[text[i]];
} else {
c = this.characters["_"];
}
if (c.count != 0) {
mat4_fromTranslation(this.matrix, [offset, 0, 0]);
mat4_multiply(this.matrix, modelViewMat, this.matrix);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, this.matrix);
gl.drawElements(gl.TRIANGLES, c.count, gl.UNSIGNED_SHORT, c.offset);
}
offset += this.kerning;
}
}
//-----------
// FPS Graph
//-----------
var statsVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec3 color;",
"varying vec4 vColor;",
"void main() {",
" vColor = vec4(color, 1.0);",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var statsFS = [
"precision mediump float;",
"varying vec4 vColor;",
"void main() {",
" gl_FragColor = vColor;",
"}",
].join("\n");
var segments = 30;
var maxFPS = 90;
function segmentToX(i) {
return ((0.9/segments) * i) - 0.45;
}
function fpsToY(value) {
return (Math.min(value, maxFPS) * (0.7 / maxFPS)) - 0.45;
}
function fpsToRGB(value) {
return {
r: Math.max(0.0, Math.min(1.0, 1.0 - (value/60))),
g: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15)))),
b: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15))))
};
}
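// Worked example: at 60 FPS, fpsToY(60) = 60 * (0.7 / 90) - 0.45 ~= 0.017
// (just above the panel's vertical center), and fpsToRGB(60) yields
// { r: 0.0, g: 0.6, b: 0.6 }; lower framerates shift the bar toward red.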
var now = /*( performance && performance.now ) ? performance.now.bind( performance ) :*/ Date.now;
var Stats = function(gl) {
this.gl = gl;
this.sevenSegmentText = new SevenSegmentText(gl);
this.startTime = now();
this.prevTime = this.startTime;
this.frames = 0;
this.fps = 0;
this.orthoProjMatrix = new Float32Array(16);
this.orthoViewMatrix = new Float32Array(16);
this.modelViewMatrix = new Float32Array(16);
// Hard coded because it doesn't change:
// Scale by 0.075 in X and Y
// Translate into upper left corner w/ z = 0.02
this.textMatrix = new Float32Array([
0.075, 0, 0, 0,
0, 0.075, 0, 0,
0, 0, 1, 0,
-0.3625, 0.3625, 0.02, 1
]);
this.lastSegment = 0;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, statsVS, statsFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var fpsVerts = [];
var fpsIndices = [];
// Graph geometry
for (var i = 0; i < segments; ++i) {
// Bar top
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
// Bar bottom
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
var idx = i * 4;
fpsIndices.push(idx, idx+3, idx+1,
idx+3, idx, idx+2);
}
function addBGSquare(left, bottom, right, top, z, r, g, b) {
var idx = fpsVerts.length / 6;
fpsVerts.push(left, bottom, z, r, g, b);
fpsVerts.push(right, top, z, r, g, b);
fpsVerts.push(left, top, z, r, g, b);
fpsVerts.push(right, bottom, z, r, g, b);
fpsIndices.push(idx, idx+1, idx+2,
idx, idx+3, idx+1);
};
// Panel Background
addBGSquare(-0.5, -0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.125);
// FPS Background
addBGSquare(-0.45, -0.45, 0.45, 0.25, 0.01, 0.0, 0.0, 0.4);
// 30 FPS line
addBGSquare(-0.45, fpsToY(30), 0.45, fpsToY(32), 0.015, 0.5, 0.0, 0.5);
// 60 FPS line
addBGSquare(-0.45, fpsToY(60), 0.45, fpsToY(62), 0.015, 0.2, 0.0, 0.75);
this.fpsVertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fpsVerts), gl.DYNAMIC_DRAW);
this.fpsIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(fpsIndices), gl.STATIC_DRAW);
this.fpsIndexCount = fpsIndices.length;
};
Stats.prototype.begin = function() {
this.startTime = now();
};
Stats.prototype.end = function() {
var time = now();
this.frames++;
if (time > this.prevTime + 250) {
this.fps = Math.round((this.frames * 1000) / (time - this.prevTime));
this.updateGraph(this.fps);
this.prevTime = time;
this.frames = 0;
}
};
Stats.prototype.updateGraph = function(value) {
var gl = this.gl;
var color = fpsToRGB(value);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
// Update the current segment with the new FPS value
var updateVerts = [
segmentToX(this.lastSegment), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
];
// Re-shape the next segment into the green "progress" line
color.r = 0.2;
color.g = 1.0;
color.b = 0.2;
if (this.lastSegment == segments - 1) {
// If we're updating the last segment we need to do two bufferSubDatas
// to update the segment and turn the first segment into the progress line.
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
updateVerts = [
segmentToX(0), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(0), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(0), 0.02, color.r, color.g, color.b
];
gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(updateVerts));
} else {
updateVerts.push(
segmentToX(this.lastSegment+1), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(0), 0.02, color.r, color.g, color.b
);
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
}
this.lastSegment = (this.lastSegment+1) % segments;
};
Stats.prototype.render = function(projectionMat, modelViewMat) {
var gl = this.gl;
// Render text first, minor win for early fragment discard
mat4_multiply(this.modelViewMatrix, modelViewMat, this.textMatrix);
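// "FP5" below is deliberate: the seven-segment font defines "5" but no "S".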
this.sevenSegmentText.render(projectionMat, this.modelViewMatrix, this.fps + " FP5");
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, modelViewMat);
gl.enableVertexAttribArray(this.attribs.position);
gl.enableVertexAttribArray(this.attribs.color);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.vertexAttribPointer(this.attribs.position, 3, gl.FLOAT, false, 24, 0);
gl.vertexAttribPointer(this.attribs.color, 3, gl.FLOAT, false, 24, 12);
// Draw the graph and background in a single call
gl.drawElements(gl.TRIANGLES, this.fpsIndexCount, gl.UNSIGNED_SHORT, 0);
}
Stats.prototype.renderOrtho = function(x, y, width, height) {
var canvas = this.gl.canvas;
if (x == undefined || y == undefined) {
x = 10 * window.devicePixelRatio;
y = 10 * window.devicePixelRatio;
}
if (width == undefined || height == undefined) {
width = 75 * window.devicePixelRatio;
height = 75 * window.devicePixelRatio;
}
mat4_ortho(this.orthoProjMatrix, 0, canvas.width, 0, canvas.height, 0.1, 1024);
mat4_identity(this.orthoViewMatrix);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [x, canvas.height - height - y, -1]);
mat4_scale(this.orthoViewMatrix, this.orthoViewMatrix, [width, height, 1]);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [0.5, 0.5, 0]);
this.render(this.orthoProjMatrix, this.orthoViewMatrix);
}
return Stats;
})();


@@ -0,0 +1,687 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Handles loading of textures in multiple formats, and tries to be efficient about it.
Formats supported will vary by devices. Use the .supports<format>() functions
to determine if a format is supported. Most of the time you can just call
loader.loadTexture("url"); and it will handle it based on the extension.
If the extension can't be relied on use the corresponding
.load<Extension>("url") calls.
*/
var WGLUTextureLoader = (function() {
"use strict";
//============================//
// DXT constants and utilites //
//============================//
// Utility functions
// Builds a numeric code for a given fourCC string
function fourCCToInt32(value) {
return value.charCodeAt(0) +
(value.charCodeAt(1) << 8) +
(value.charCodeAt(2) << 16) +
(value.charCodeAt(3) << 24);
}
// Turns a fourCC numeric code into a string
function int32ToFourCC(value) {
return String.fromCharCode(
value & 0xff,
(value >> 8) & 0xff,
(value >> 16) & 0xff,
(value >> 24) & 0xff
);
}
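// Example: fourCCToInt32("DXT1") === 0x31545844, and
// int32ToFourCC(0x31545844) === "DXT1" (bytes packed little-endian).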
// Calculates the size of a compressed texture level in bytes
function textureLevelSize(format, width, height) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGB_ETC1_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 8;
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 16;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
return Math.floor((Math.max(width, 8) * Math.max(height, 8) * 4 + 7) / 8);
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return Math.floor((Math.max(width, 16) * Math.max(height, 8) * 2 + 7) / 8);
default:
return 0;
}
}
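// Worked example: a 256x256 DXT1 level is ((256+3)>>2) * ((256+3)>>2) * 8 =
// 64 * 64 * 8 = 32768 bytes (one 8-byte block per 4x4 texel tile).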
// DXT formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/
var COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0;
var COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1;
var COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2;
var COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3;
// ATC formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_atc/
var COMPRESSED_RGB_ATC_WEBGL = 0x8C92;
var COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL = 0x8C93;
var COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL = 0x87EE;
// DXT values and structures referenced from:
// http://msdn.microsoft.com/en-us/library/bb943991.aspx/
var DDS_MAGIC = 0x20534444;
var DDSD_MIPMAPCOUNT = 0x20000;
var DDPF_FOURCC = 0x4;
var DDS_HEADER_LENGTH = 31; // The header length in 32 bit ints.
// Offsets into the header array.
var DDS_HEADER_MAGIC = 0;
var DDS_HEADER_SIZE = 1;
var DDS_HEADER_FLAGS = 2;
var DDS_HEADER_HEIGHT = 3;
var DDS_HEADER_WIDTH = 4;
var DDS_HEADER_MIPMAPCOUNT = 7;
var DDS_HEADER_PF_FLAGS = 20;
var DDS_HEADER_PF_FOURCC = 21;
// FourCC format identifiers.
var FOURCC_DXT1 = fourCCToInt32("DXT1");
var FOURCC_DXT3 = fourCCToInt32("DXT3");
var FOURCC_DXT5 = fourCCToInt32("DXT5");
var FOURCC_ATC = fourCCToInt32("ATC ");
var FOURCC_ATCA = fourCCToInt32("ATCA");
var FOURCC_ATCI = fourCCToInt32("ATCI");
//==================//
// Crunch constants //
//==================//
// Taken from crnlib.h
var CRN_FORMAT = {
cCRNFmtInvalid: -1,
cCRNFmtDXT1: 0,
// cCRNFmtDXT3 is not currently supported when writing to CRN - only DDS.
cCRNFmtDXT3: 1,
cCRNFmtDXT5: 2
// Crunch supports more formats than this, but we can't use them here.
};
// Mapping of Crunch formats to DXT formats.
var DXT_FORMAT_MAP = {};
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT1] = COMPRESSED_RGB_S3TC_DXT1_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT3] = COMPRESSED_RGBA_S3TC_DXT3_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT5] = COMPRESSED_RGBA_S3TC_DXT5_EXT;
//===============//
// PVR constants //
//===============//
// PVR formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_pvrtc/
var COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00;
var COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01;
var COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02;
var COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03;
// ETC1 format, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/
var COMPRESSED_RGB_ETC1_WEBGL = 0x8D64;
var PVR_FORMAT_2BPP_RGB = 0;
var PVR_FORMAT_2BPP_RGBA = 1;
var PVR_FORMAT_4BPP_RGB = 2;
var PVR_FORMAT_4BPP_RGBA = 3;
var PVR_FORMAT_ETC1 = 6;
var PVR_FORMAT_DXT1 = 7;
var PVR_FORMAT_DXT3 = 9;
var PVR_FORMAT_DXT5 = 5;
var PVR_HEADER_LENGTH = 13; // The header length in 32 bit ints.
var PVR_MAGIC = 0x03525650; //0x50565203;
// Offsets into the header array.
var PVR_HEADER_MAGIC = 0;
var PVR_HEADER_FORMAT = 2;
var PVR_HEADER_HEIGHT = 6;
var PVR_HEADER_WIDTH = 7;
var PVR_HEADER_MIPMAPCOUNT = 11;
var PVR_HEADER_METADATA = 12;
//============//
// Misc Utils //
//============//
// When an error occurs set the texture to a 1x1 black pixel
// This prevents WebGL errors from attempting to use unrenderable textures
// and clears out stale data if we're re-using a texture.
function clearOnError(gl, error, texture, callback) {
if (console) {
console.error(error);
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// Notify the user that an error occurred and the texture is ready.
if (callback) { callback(texture, error, null); }
}
function isPowerOfTwo(n) {
return (n & (n - 1)) === 0;
}
function getExtension(gl, name) {
var vendorPrefixes = ["", "WEBKIT_", "MOZ_"];
var ext = null;
for (var i in vendorPrefixes) {
ext = gl.getExtension(vendorPrefixes[i] + name);
if (ext) { break; }
}
return ext;
}
//==================//
// DDS File Reading //
//==================//
// Parse a DDS file and provide information about the raw DXT data it contains to the given callback.
function parseDDS(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the DDS header.
var header = new Int32Array(arrayBuffer, 0, DDS_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid DDS file.
if(header[DDS_HEADER_MAGIC] != DDS_MAGIC) {
errorCallback("Invalid magic number in DDS header");
return 0;
}
if(!(header[DDS_HEADER_PF_FLAGS] & DDPF_FOURCC)) {
errorCallback("Unsupported format, must contain a FourCC code");
return 0;
}
// Determine what type of compressed data the file contains.
var fourCC = header[DDS_HEADER_PF_FOURCC];
var internalFormat;
switch(fourCC) {
case FOURCC_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case FOURCC_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case FOURCC_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
case FOURCC_ATC:
internalFormat = COMPRESSED_RGB_ATC_WEBGL;
break;
case FOURCC_ATCA:
internalFormat = COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL;
break;
case FOURCC_ATCI:
internalFormat = COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL;
break;
default:
errorCallback("Unsupported FourCC code: " + int32ToFourCC(fourCC));
return;
}
// Determine how many mipmap levels the file contains.
var levels = 1;
if(header[DDS_HEADER_FLAGS] & DDSD_MIPMAPCOUNT) {
levels = Math.max(1, header[DDS_HEADER_MIPMAPCOUNT]);
}
// Gather other basic metrics and a view of the raw DXT data.
var width = header[DDS_HEADER_WIDTH];
var height = header[DDS_HEADER_HEIGHT];
var dataOffset = header[DDS_HEADER_SIZE] + 4;
var dxtData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the DXT information to the callback for uploading.
callback(dxtData, width, height, levels, internalFormat);
}
//==================//
// PVR File Reading //
//==================//
// Parse a PVR file and provide information about the raw texture data it contains to the given callback.
function parsePVR(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the PVR header.
var header = new Int32Array(arrayBuffer, 0, PVR_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid PVR file.
if(header[PVR_HEADER_MAGIC] != PVR_MAGIC) {
errorCallback("Invalid magic number in PVR header");
return 0;
}
// Determine what type of compressed data the file contains.
var format = header[PVR_HEADER_FORMAT];
var internalFormat;
switch(format) {
case PVR_FORMAT_2BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_2BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_ETC1:
internalFormat = COMPRESSED_RGB_ETC1_WEBGL;
break;
case PVR_FORMAT_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case PVR_FORMAT_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case PVR_FORMAT_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
default:
errorCallback("Unsupported PVR format: " + format);
return;
}
// Gather other basic metrics and a view of the raw PVRTC data.
var width = header[PVR_HEADER_WIDTH];
var height = header[PVR_HEADER_HEIGHT];
var levels = header[PVR_HEADER_MIPMAPCOUNT];
var dataOffset = header[PVR_HEADER_METADATA] + 52;
var pvrtcData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the PVRTC information to the callback for uploading.
callback(pvrtcData, width, height, levels, internalFormat);
}
//=============//
// IMG loading //
//=============//
/*
This function provides a method for loading webgl textures using a pool of
image elements, which has very low memory overhead. For more details see:
http://blog.tojicode.com/2012/03/javascript-memory-optimization-and.html
*/
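// Pool mechanics: at most MAX_CACHE_IMAGES (16) Image elements are ever
// created; idle loaders are recycled through textureImageCache, and requests
// beyond the pool size queue in pendingTextureRequests until one frees up.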
var loadImgTexture = (function createTextureLoader() {
var MAX_CACHE_IMAGES = 16;
var textureImageCache = new Array(MAX_CACHE_IMAGES);
var cacheTop = 0;
var remainingCacheImages = MAX_CACHE_IMAGES;
var pendingTextureRequests = [];
var TextureImageLoader = function(loadedCallback) {
var self = this;
var blackPixel = new Uint8Array([0, 0, 0]);
this.gl = null;
this.texture = null;
this.callback = null;
this.image = new Image();
this.image.crossOrigin = 'anonymous';
this.image.addEventListener('load', function() {
var gl = self.gl;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
var startTime = Date.now();
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, self.image);
if (isPowerOfTwo(self.image.width) && isPowerOfTwo(self.image.height)) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
var uploadTime = Date.now() - startTime;
if(self.callback) {
var stats = {
width: self.image.width,
height: self.image.height,
internalFormat: gl.RGBA,
levelZeroSize: self.image.width * self.image.height * 4,
uploadTime: uploadTime
};
self.callback(self.texture, null, stats);
}
loadedCallback(self);
}, false);
this.image.addEventListener('error', function(ev) {
clearOnError(self.gl, 'Image could not be loaded: ' + self.image.src, self.texture, self.callback);
loadedCallback(self);
}, false);
};
TextureImageLoader.prototype.loadTexture = function(gl, src, texture, callback) {
this.gl = gl;
this.texture = texture;
this.callback = callback;
this.image.src = src;
};
var PendingTextureRequest = function(gl, src, texture, callback) {
this.gl = gl;
this.src = src;
this.texture = texture;
this.callback = callback;
};
function releaseTextureImageLoader(til) {
var req;
if(pendingTextureRequests.length) {
req = pendingTextureRequests.shift();
til.loadTexture(req.gl, req.src, req.texture, req.callback);
} else {
textureImageCache[cacheTop++] = til;
}
}
return function(gl, src, texture, callback) {
var til;
if(cacheTop) {
til = textureImageCache[--cacheTop];
til.loadTexture(gl, src, texture, callback);
} else if (remainingCacheImages) {
til = new TextureImageLoader(releaseTextureImageLoader);
til.loadTexture(gl, src, texture, callback);
--remainingCacheImages;
} else {
pendingTextureRequests.push(new PendingTextureRequest(gl, src, texture, callback));
}
return texture;
};
})();
//=====================//
// TextureLoader Class //
//=====================//
// This class is our public interface.
var TextureLoader = function(gl) {
this.gl = gl;
// Load the compression format extensions, if available
this.dxtExt = getExtension(gl, "WEBGL_compressed_texture_s3tc");
this.pvrtcExt = getExtension(gl, "WEBGL_compressed_texture_pvrtc");
this.atcExt = getExtension(gl, "WEBGL_compressed_texture_atc");
this.etc1Ext = getExtension(gl, "WEBGL_compressed_texture_etc1");
// Returns whether or not the compressed format is supported by the WebGL implementation
TextureLoader.prototype._formatSupported = function(format) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
return !!this.dxtExt;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return !!this.pvrtcExt;
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return !!this.atcExt;
case COMPRESSED_RGB_ETC1_WEBGL:
return !!this.etc1Ext;
default:
return false;
}
}
// Uploads compressed texture data to the GPU.
TextureLoader.prototype._uploadCompressedData = function(data, width, height, levels, internalFormat, texture, callback) {
var gl = this.gl;
gl.bindTexture(gl.TEXTURE_2D, texture);
var offset = 0;
var stats = {
width: width,
height: height,
internalFormat: internalFormat,
levelZeroSize: textureLevelSize(internalFormat, width, height),
uploadTime: 0
};
var startTime = Date.now();
// Loop through each mip level of compressed texture data provided and upload it to the given texture.
for (var i = 0; i < levels; ++i) {
// Determine how big this level of compressed texture data is in bytes.
var levelSize = textureLevelSize(internalFormat, width, height);
// Get a view of the bytes for this level of DXT data.
var dxtLevel = new Uint8Array(data.buffer, data.byteOffset + offset, levelSize);
// Upload!
gl.compressedTexImage2D(gl.TEXTURE_2D, i, internalFormat, width, height, 0, dxtLevel);
// The next mip level will be half the height and width of this one.
width = width >> 1;
height = height >> 1;
// Advance the offset into the compressed texture data past the current mip level's data.
offset += levelSize;
}
stats.uploadTime = Date.now() - startTime;
// We can't use gl.generateMipmaps with compressed textures, so only use
// mipmapped filtering if the compressed texture data contained mip levels.
if (levels > 1) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
// Notify the user that the texture is ready.
if (callback) { callback(texture, null, stats); }
}
TextureLoader.prototype.supportsDXT = function() {
return !!this.dxtExt;
}
TextureLoader.prototype.supportsPVRTC = function() {
return !!this.pvrtcExt;
}
TextureLoader.prototype.supportsATC = function() {
return !!this.atcExt;
}
TextureLoader.prototype.supportsETC1 = function() {
return !!this.etc1Ext;
}
// Loads an image file into the given texture.
// Supports any format that can be loaded into an img tag
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadIMG = function(src, texture, callback) {
if(!texture) {
texture = this.gl.createTexture();
}
loadImgTexture(this.gl, src, texture, callback);
return texture;
}
// Loads a DDS file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadDDS = function(src, texture, callback) {
var self = this;
if (!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parseDDS(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed DXT data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a PVR file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadPVR = function(src, texture, callback) {
var self = this;
if(!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parsePVR(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed PVR data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a texture from a file. Guesses the type based on extension.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadTexture = function(src, texture, callback) {
// Shamelessly lifted from StackOverflow :)
// http://stackoverflow.com/questions/680929
var re = /(?:\.([^.]+))?$/;
var ext = re.exec(src)[1] || '';
ext = ext.toLowerCase();
switch(ext) {
case 'dds':
return this.loadDDS(src, texture, callback);
case 'pvr':
return this.loadPVR(src, texture, callback);
default:
return this.loadIMG(src, texture, callback);
}
}
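// Example usage (a sketch, not part of the original file; the asset paths are
// hypothetical placeholders, and WGLUTextureLoader is the global name this
// module is exposed under in the samples):
//
//   var loader = new WGLUTextureLoader(gl);
//   var texture;
//   if (loader.supportsDXT()) {
//     texture = loader.loadDDS("media/textures/example-dxt5.dds");
//   } else if (loader.supportsPVRTC()) {
//     texture = loader.loadPVR("media/textures/example.pvr");
//   } else {
//     // loadTexture would also pick the right loader from the file extension.
//     texture = loader.loadIMG("media/textures/example.png");
//   }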
// Sets a texture to a solid RGBA color
// If no texture is provided one is created and returned.
TextureLoader.prototype.makeSolidColor = function(r, g, b, a, texture) {
var gl = this.gl;
var data = new Uint8Array([r, g, b, a]);
if(!texture) {
texture = gl.createTexture();
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
return texture;
}
return TextureLoader;
})();


@@ -0,0 +1,94 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Provides a simple way to get values from the query string if they're present
and use a default value if not. Not strictly a "WebGL" utility, but I use it
frequently enough for debugging that I wanted to include it here.
Example:
For the URL http://example.com/index.html?particleCount=1000
WGLUUrl.getInt("particleCount", 100); // URL overrides, returns 1000
WGLUUrl.getInt("particleSize", 10); // Not in URL, returns default of 10
*/
var WGLUUrl = (function() {
"use strict";
var urlArgs = null;
function ensureArgsCached() {
if (!urlArgs) {
urlArgs = {};
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split("=");
urlArgs[pair[0].toLowerCase()] = decodeURIComponent(pair[1]);
}
}
}
function getString(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return urlArgs[lcaseName];
}
return defaultValue;
}
function getInt(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10);
}
return defaultValue;
}
function getFloat(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseFloat(urlArgs[lcaseName]);
}
return defaultValue;
}
function getBool(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10) != 0;
}
return defaultValue;
}
return {
getString: getString,
getInt: getInt,
getFloat: getFloat,
getBool: getBool
};
})();


@@ -0,0 +1,284 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function (VRAudioPanner) {
'use strict';
// Default settings for panning. Cone parameters are experimentally
// determined.
var _PANNING_MODEL = 'HRTF';
var _DISTANCE_MODEL = 'inverse';
var _CONE_INNER_ANGLE = 60;
var _CONE_OUTER_ANGLE = 120;
var _CONE_OUTER_GAIN = 0.25;
// Super-simple web audio version detection.
var _LEGACY_WEBAUDIO = window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext');
if (_LEGACY_WEBAUDIO)
console.log('[VRAudioPanner] outdated version of Web Audio API detected.');
// Master audio context.
var _context = _LEGACY_WEBAUDIO ? new webkitAudioContext() : new AudioContext();
/**
* A buffer source player with HRTF panning for testing purposes.
* @param {Object} options Default options.
* @param {Number} options.gain Sound object gain. (0.0~1.0)
* @param {AudioBuffer} options.buffer AudioBuffer to play.
* @param {Number} options.detune Detune parameter. (cent)
* @param {Array} options.position x, y, z position in an array.
* @param {Array} options.orientation x, y, z orientation in an array.
*/
function TestSource (options) {
this._src = _context.createBufferSource();
this._out = _context.createGain();
this._panner = _context.createPanner();
this._analyser = _context.createAnalyser();
this._src.connect(this._out);
this._out.connect(this._analyser);
this._analyser.connect(this._panner);
this._panner.connect(_context.destination);
this._src.buffer = options.buffer;
this._src.loop = true;
this._out.gain.value = options.gain;
this._analyser.fftSize = 1024;
this._analyser.smoothingTimeConstant = 0.85;
this._lastRMSdB = 0.0;
this._panner.panningModel = _PANNING_MODEL;
this._panner.distanceModel = _DISTANCE_MODEL;
this._panner.coneInnerAngle = _CONE_INNER_ANGLE;
this._panner.coneOuterAngle = _CONE_OUTER_ANGLE;
this._panner.coneOuterGain = _CONE_OUTER_GAIN;
this._position = [0, 0, 0];
this._orientation = [1, 0, 0];
this._analyserBuffer = new Uint8Array(this._analyser.fftSize);
if (!_LEGACY_WEBAUDIO) {
this._src.detune.value = (options.detune || 0);
this._analyserBuffer = new Float32Array(this._analyser.fftSize);
}
this.setPosition(options.position);
this.setOrientation(options.orientation);
};
TestSource.prototype.start = function () {
this._src.start(0);
};
TestSource.prototype.stop = function () {
this._src.stop(0);
};
TestSource.prototype.getPosition = function () {
return this._position;
};
TestSource.prototype.setPosition = function (position) {
if (position) {
this._position[0] = position[0];
this._position[1] = position[1];
this._position[2] = position[2];
}
this._panner.setPosition.apply(this._panner, this._position);
};
TestSource.prototype.getOrientation = function () {
return this._orientation;
};
TestSource.prototype.setOrientation = function (orientation) {
if (orientation) {
this._orientation[0] = orientation[0];
this._orientation[1] = orientation[1];
this._orientation[2] = orientation[2];
}
this._panner.setOrientation.apply(this._panner, this._orientation);
};
TestSource.prototype.getCubeScale = function () {
// Safari does not support getFloatTimeDomainData(), so fall back to the
// naive spectral energy sum. This is relatively expensive.
if (_LEGACY_WEBAUDIO) {
this._analyser.getByteFrequencyData(this._analyserBuffer);
for (var k = 0, total = 0; k < this._analyserBuffer.length; ++k)
total += this._analyserBuffer[k];
total /= this._analyserBuffer.length;
return (total / 256.0) * 1.5;
}
this._analyser.getFloatTimeDomainData(this._analyserBuffer);
for (var i = 0, sum = 0; i < this._analyserBuffer.length; ++i)
sum += this._analyserBuffer[i] * this._analyserBuffer[i];
// Calculate RMS and convert it to dB for perceptual loudness.
// (10 / Math.LN10 * Math.log(x) is the same as 10 * log10(x).)
var rms = Math.sqrt(sum / this._analyserBuffer.length);
var db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
// Moving average with the alpha of 0.525. Experimentally determined.
this._lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - this._lastRMSdB);
// Scaling by 1/30 is also experimentally determined.
return this._lastRMSdB / 30.0;
};
// Internal helper: load a file into a buffer. (github.com/hoch/spiral)
function _loadAudioFile(context, fileInfo, done) {
var xhr = new XMLHttpRequest();
xhr.open('GET', fileInfo.url);
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
if (xhr.status === 200) {
context.decodeAudioData(xhr.response,
function (buffer) {
console.log('[VRAudioPanner] File loaded: ' + fileInfo.url);
done(fileInfo.name, buffer);
},
function (message) {
console.log('[VRAudioPanner] Decoding failure: ' + fileInfo.url + ' (' + message + ')');
done(fileInfo.name, null);
});
} else {
console.log('[VRAudioPanner] XHR Error: ' + fileInfo.url + ' (' + xhr.statusText + ')');
done(fileInfo.name, null);
}
};
xhr.onerror = function (event) {
console.log('[VRAudioPanner] XHR Network failure: ' + fileInfo.url);
done(fileInfo.name, null);
};
xhr.send();
}
/**
* A wrapper/container class for multiple file loaders.
* @param {Object} context AudioContext
* @param {Array} audioFileData Audio file info in the format of {name, url}
* @param {Function} resolve Resolution handler for promise.
* @param {Function} reject Rejection handler for promise.
* @param {Function} progress Progress event handler.
*/
function AudioBufferManager(context, audioFileData, resolve, reject, progress) {
this._context = context;
this._resolve = resolve;
this._reject = reject;
this._progress = progress;
this._buffers = new Map();
this._loadingTasks = {};
// Iterating file loading.
for (var i = 0; i < audioFileData.length; i++) {
var fileInfo = audioFileData[i];
// Check for duplicate filenames and bail out if one is found.
if (this._loadingTasks.hasOwnProperty(fileInfo.name)) {
console.log('[VRAudioPanner] Duplicated filename in AudioBufferManager: ' + fileInfo.name);
return;
}
// Mark it as pending (0)
this._loadingTasks[fileInfo.name] = 0;
_loadAudioFile(this._context, fileInfo, this._done.bind(this));
}
}
AudioBufferManager.prototype._done = function (filename, buffer) {
// Label the loading task.
this._loadingTasks[filename] = buffer !== null ? 'loaded' : 'failed';
// A failed task will be a null buffer.
this._buffers.set(filename, buffer);
this._updateProgress(filename);
};
AudioBufferManager.prototype._updateProgress = function (filename) {
var numberOfFinishedTasks = 0, numberOfFailedTasks = 0;
var numberOfTasks = 0;
for (var task in this._loadingTasks) {
numberOfTasks++;
if (this._loadingTasks[task] === 'loaded')
numberOfFinishedTasks++;
else if (this._loadingTasks[task] === 'failed')
numberOfFailedTasks++;
}
if (typeof this._progress === 'function')
this._progress(filename, numberOfFinishedTasks, numberOfTasks);
if (numberOfFinishedTasks === numberOfTasks)
this._resolve(this._buffers);
if (numberOfFinishedTasks + numberOfFailedTasks === numberOfTasks)
this._reject(this._buffers);
};
/**
* Returns true if the web audio implementation is outdated.
* @return {Boolean}
*/
VRAudioPanner.isWebAudioOutdated = function () {
return _LEGACY_WEBAUDIO;
}
/**
* Static method for updating listener's position.
* @param {Array} position Listener position in x, y, z.
*/
VRAudioPanner.setListenerPosition = function (position) {
_context.listener.setPosition.apply(_context.listener, position);
};
/**
* Static method for updating listener's orientation.
* @param {Array} orientation Listener orientation in x, y, z.
* @param {Array} upvector Listener's up vector in x, y, z.
*/
VRAudioPanner.setListenerOrientation = function (orientation, upvector) {
_context.listener.setOrientation(
orientation[0], orientation[1], orientation[2],
upvector[0], upvector[1], upvector[2]);
};
/**
* Load an audio file asynchronously.
* @param {Array} dataModel Audio file info in the format of {name, url}
* @param {Function} onprogress Callback function for reporting the progress.
* @return {Promise} Promise.
*/
VRAudioPanner.loadAudioFiles = function (dataModel, onprogress) {
return new Promise(function (resolve, reject) {
new AudioBufferManager(_context, dataModel, resolve, reject, onprogress);
});
};
/**
* Create a source player. See TestSource class for parameter description.
* @return {TestSource}
*/
VRAudioPanner.createTestSource = function (options) {
return new TestSource(options);
};
})(window.VRAudioPanner = {});
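// Example usage (a sketch, not part of the original file; the sound URL is a
// hypothetical placeholder, and browsers may require a user gesture before
// audio playback starts):
//
//   VRAudioPanner.loadAudioFiles([{ name: 'beep', url: 'media/sound/beep.wav' }])
//     .then(function (buffers) {
//       var source = VRAudioPanner.createTestSource({
//         gain: 0.8,
//         buffer: buffers.get('beep'),
//         detune: 0,
//         position: [0, 1, -2],
//         orientation: [1, 0, 0]
//       });
//       source.start();
//       VRAudioPanner.setListenerPosition([0, 0, 0]);
//       VRAudioPanner.setListenerOrientation([0, 0, -1], [0, 1, 0]);
//     });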


@@ -0,0 +1,210 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
/*
Like CubeSea, but designed around a user's physical space: one central
platform that maps to the user's play area, plus several floating cubes that
sit just beyond those boundaries (just to add visual interest).
*/
window.VRCubeIsland = (function () {
"use strict";
var cubeIslandVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeIslandFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeIsland = function (gl, texture, width, depth) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeIslandVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeIslandFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
this.vertBuffer = gl.createBuffer();
this.indexBuffer = gl.createBuffer();
this.resize(width, depth);
};
CubeIsland.prototype.resize = function (width, depth) {
var gl = this.gl;
this.width = width;
this.depth = depth;
var cubeVerts = [];
var cubeIndices = [];
// Build a single box.
function appendBox (left, bottom, back, right, top, front) {
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 1.0, 1.0);
cubeVerts.push(left, top, front, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(left, top, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 0.0, 0.0);
cubeVerts.push(right, bottom, front, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, back, 0.0, 1.0);
cubeVerts.push(right, top, back, 0.0, 0.0);
cubeVerts.push(left, top, back, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, front, 0.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 1.0);
cubeVerts.push(right, top, front, 1.0, 0.0);
cubeVerts.push(left, top, front, 0.0, 0.0);
}
// Appends a cube with the given centerpoint and size.
function appendCube (x, y, z, size) {
var halfSize = size * 0.5;
appendBox(x - halfSize, y - halfSize, z - halfSize,
x + halfSize, y + halfSize, z + halfSize);
}
// Main "island", covers where the user can safely stand. Top of the cube
// (the ground the user stands on) should be at Y=0 to align with users
// floor. X=0 and Z=0 should be at the center of the users play space.
appendBox(-width * 0.5, -width, -depth * 0.5, width * 0.5, 0, depth * 0.5);
// A sprinkling of other cubes to make things more visually interesting.
appendCube(1.1, 0.3, (-depth * 0.5) - 0.8, 0.5);
appendCube(-0.5, 1.0, (-depth * 0.5) - 0.9, 0.75);
appendCube(0.6, 1.5, (-depth * 0.5) - 0.6, 0.4);
appendCube(-1.0, 0.5, (-depth * 0.5) - 0.5, 0.2);
appendCube((-width * 0.5) - 0.8, 0.3, -1.1, 0.5);
appendCube((-width * 0.5) - 0.9, 1.0, 0.5, 0.75);
appendCube((-width * 0.5) - 0.6, 1.5, -0.6, 0.4);
appendCube((-width * 0.5) - 0.5, 0.5, 1.0, 0.2);
appendCube((width * 0.5) + 0.8, 0.3, 1.1, 0.5);
appendCube((width * 0.5) + 0.9, 1.0, -0.5, 0.75);
appendCube((width * 0.5) + 0.6, 1.5, 0.6, 0.4);
appendCube((width * 0.5) + 0.5, 0.5, -1.0, 0.2);
appendCube(1.1, 1.4, (depth * 0.5) + 0.8, 0.5);
appendCube(-0.5, 1.0, (depth * 0.5) + 0.9, 0.75);
appendCube(0.6, 0.4, (depth * 0.5) + 0.6, 0.4);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
CubeIsland.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, 1.5, -this.depth * 0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.5, 0.5, 0.5]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeIsland;
})();
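// Example usage (a sketch mirroring the room-scale sample in this commit):
//
//   var island = new VRCubeIsland(gl, texture, 2, 2);
//   if (vrDisplay.stageParameters && vrDisplay.stageParameters.sizeX > 0) {
//     island.resize(vrDisplay.stageParameters.sizeX,
//                   vrDisplay.stageParameters.sizeZ);
//   }
//   island.render(projectionMat, viewMat, stats);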


@@ -0,0 +1,188 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRCubeSea = (function () {
"use strict";
var cubeSeaVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeSeaFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeSea = function (gl, texture) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeSeaVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeSeaFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var cubeVerts = [];
var cubeIndices = [];
// Build a single cube.
function appendCube (x, y, z) {
if (!x && !y && !z) {
// Don't create a cube in the center.
return;
}
var size = 0.2;
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 1.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 0.0, 0.0);
cubeVerts.push(x + size, y - size, z + size, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z - size, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z + size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 0.0);
}
var gridSize = 10;
// Build the cube sea
for (var x = 0; x < gridSize; ++x) {
for (var y = 0; y < gridSize; ++y) {
for (var z = 0; z < gridSize; ++z) {
appendCube(x - (gridSize / 2), y - (gridSize / 2), z - (gridSize / 2));
}
}
}
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
var mortimer = mat4.create();
var a = [0.9868122935295105, -0.03754837438464165, -0.15745431184768677, 0, 0.011360996402800083, 0.9863911271095276, -0.1640235036611557, 0, 0.16147033870220184, 0.16007155179977417, 0.9738093614578247, 0, 0.192538782954216, 0.024526841938495636, -0.001076754298992455, 1.0000001192092896];
for (var i = 0; i < 16; ++i) {
mortimer[i] = a[i];
}
CubeSea.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
//mat4.invert(mortimer, modelViewMat);
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, -0.3, -0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.3, 0.3, 0.3]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeSea;
})();
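// Example usage (a sketch mirroring the presentation samples in this commit;
// while presenting, render the sea once per eye with that eye's matrices):
//
//   var cubeSea = new VRCubeSea(gl, texture);
//   cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
//   cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);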


@@ -0,0 +1,219 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRPanorama = (function () {
"use strict";
var panoVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var panoFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var Panorama = function (gl) {
this.gl = gl;
this.texture = gl.createTexture();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(panoVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(panoFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var panoVerts = [];
var panoIndices = [];
var radius = 2; // 2 meter radius sphere
var latSegments = 40;
var lonSegments = 40;
// Create the vertices
for (var i=0; i <= latSegments; ++i) {
var theta = i * Math.PI / latSegments;
var sinTheta = Math.sin(theta);
var cosTheta = Math.cos(theta);
for (var j=0; j <= lonSegments; ++j) {
var phi = j * 2 * Math.PI / lonSegments;
var sinPhi = Math.sin(phi);
var cosPhi = Math.cos(phi);
var x = sinPhi * sinTheta;
var y = cosTheta;
var z = -cosPhi * sinTheta;
var u = (j / lonSegments);
var v = (i / latSegments);
panoVerts.push(x * radius, y * radius, z * radius, u, v);
}
}
// Create the indices
for (var i = 0; i < latSegments; ++i) {
var offset0 = i * (lonSegments+1);
var offset1 = (i+1) * (lonSegments+1);
for (var j = 0; j < lonSegments; ++j) {
var index0 = offset0+j;
var index1 = offset1+j;
panoIndices.push(
index0, index1, index0+1,
index1, index1+1, index0+1
);
}
}
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(panoVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(panoIndices), gl.STATIC_DRAW);
this.indexCount = panoIndices.length;
this.imgElement = null;
this.videoElement = null;
};
Panorama.prototype.setImage = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var img = new Image();
img.addEventListener('load', function() {
self.imgElement = img;
self.videoElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, img);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.imgElement);
});
img.addEventListener('error', function(ev) {
console.error(ev.message);
reject(ev.message);
}, false);
img.crossOrigin = 'anonymous';
img.src = url;
});
};
Panorama.prototype.setVideo = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var video = document.createElement('video');
video.addEventListener('canplay', function() {
// TODO: Add a "click to play" UI?
});
video.addEventListener('playing', function() {
self.videoElement = video;
self.imgElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, self.videoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.videoElement);
});
video.addEventListener('error', function(ev) {
console.error(video.error);
reject(video.error);
}, false);
video.loop = true;
video.autoplay = true;
video.crossOrigin = 'anonymous';
video.setAttribute('webkit-playsinline', '');
video.src = url;
});
};
Panorama.prototype.play = function() {
if (this.videoElement)
this.videoElement.play();
};
Panorama.prototype.pause = function() {
if (this.videoElement)
this.videoElement.pause();
};
Panorama.prototype.isPaused = function() {
if (this.videoElement)
return this.videoElement.paused;
return false;
};
Panorama.prototype.render = function (projectionMat, modelViewMat) {
var gl = this.gl;
var program = this.program;
if (!this.imgElement && !this.videoElement)
return;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (this.videoElement && !this.videoElement.paused) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.videoElement);
}
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
};
return Panorama;
})();
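// Example usage (a sketch, not part of the original file; the image URL is a
// hypothetical placeholder):
//
//   var panorama = new VRPanorama(gl);
//   panorama.setImage("media/textures/example-equirect.png").then(function () {
//     // Once loaded, draw the panorama every frame.
//     panorama.render(projectionMat, viewMat);
//   });
//
// For video panoramas use setVideo(url) instead, then play()/pause() as needed.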


@@ -0,0 +1,181 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
window.VRSamplesUtil = (function () {
"use strict";
// Lifted from the WebVR Polyfill
function isMobile () {
return /Android/i.test(navigator.userAgent) ||
/iPhone|iPad|iPod/i.test(navigator.userAgent);
}
function getMessageContainer () {
var messageContainer = document.getElementById("vr-sample-message-container");
if (!messageContainer) {
messageContainer = document.createElement("div");
messageContainer.id = "vr-sample-message-container";
messageContainer.style.fontFamily = "sans-serif";
messageContainer.style.position = "absolute";
messageContainer.style.zIndex = "999";
messageContainer.style.left = "0";
messageContainer.style.top = "0";
messageContainer.style.right = "0";
messageContainer.style.margin = "0";
messageContainer.style.padding = "0";
messageContainer.align = "center";
document.body.appendChild(messageContainer);
}
return messageContainer;
}
function addMessageElement (message, backgroundColor) {
var messageElement = document.createElement("div");
messageElement.classList.add("vr-sample-message");
messageElement.style.color = "#FFF";
messageElement.style.backgroundColor = backgroundColor;
messageElement.style.borderRadius = "3px";
messageElement.style.position = "relative";
messageElement.style.display = "inline-block";
messageElement.style.margin = "0.5em";
messageElement.style.padding = "0.75em";
messageElement.innerHTML = message;
getMessageContainer().appendChild(messageElement);
return messageElement;
}
// Makes the given element fade out and remove itself from the DOM after the
// given timeout.
function makeToast (element, timeout) {
element.style.transition = "opacity 0.5s ease-in-out";
element.style.opacity = "1";
setTimeout(function () {
element.style.opacity = "0";
setTimeout(function () {
if (element.parentElement)
element.parentElement.removeChild(element);
}, 500);
}, timeout);
}
function addError (message, timeout) {
var element = addMessageElement("<b>ERROR:</b> " + message, "#D33");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function addInfo (message, timeout) {
var element = addMessageElement(message, "#22A");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function getButtonContainer () {
var buttonContainer = document.getElementById("vr-sample-button-container");
if (!buttonContainer) {
buttonContainer = document.createElement("div");
buttonContainer.id = "vr-sample-button-container";
buttonContainer.style.fontFamily = "sans-serif";
buttonContainer.style.position = "absolute";
buttonContainer.style.zIndex = "999";
buttonContainer.style.left = "0";
buttonContainer.style.bottom = "0";
buttonContainer.style.right = "0";
buttonContainer.style.margin = "0";
buttonContainer.style.padding = "0";
buttonContainer.align = "right";
document.body.appendChild(buttonContainer);
}
return buttonContainer;
}
function addButtonElement (message, key, icon) {
var buttonElement = document.createElement("div");
buttonElement.classList.add("vr-sample-button");
buttonElement.style.color = "#FFF";
buttonElement.style.fontWeight = "bold";
buttonElement.style.backgroundColor = "#888";
buttonElement.style.borderRadius = "5px";
buttonElement.style.border = "3px solid #555";
buttonElement.style.position = "relative";
buttonElement.style.display = "inline-block";
buttonElement.style.margin = "0.5em";
buttonElement.style.padding = "0.75em";
buttonElement.style.cursor = "pointer";
buttonElement.align = "center";
if (icon) {
buttonElement.innerHTML = "<img src='" + icon + "'/><br/>" + message;
} else {
buttonElement.innerHTML = message;
}
if (key) {
var keyElement = document.createElement("span");
keyElement.classList.add("vr-sample-button-accelerator");
keyElement.style.fontSize = "0.75em";
keyElement.style.fontStyle = "italic";
keyElement.innerHTML = " (" + key + ")";
buttonElement.appendChild(keyElement);
}
getButtonContainer().appendChild(buttonElement);
return buttonElement;
}
function addButton (message, key, icon, callback) {
var keyListener = null;
if (key) {
var keyCode = key.charCodeAt(0);
keyListener = function (event) {
if (event.keyCode === keyCode) {
callback(event);
}
};
document.addEventListener("keydown", keyListener, false);
}
var element = addButtonElement(message, key, icon);
element.addEventListener("click", function (event) {
callback(event);
event.preventDefault();
}, false);
return {
element: element,
keyListener: keyListener
};
}
function removeButton (button) {
if (!button)
return;
if (button.element.parentElement)
button.element.parentElement.removeChild(button.element);
if (button.keyListener)
document.removeEventListener("keydown", button.keyListener, false);
}
return {
isMobile: isMobile,
addError: addError,
addInfo: addInfo,
addButton: addButton,
removeButton: removeButton,
makeToast: makeToast
};
})();
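// Example usage (a sketch; the callback body is illustrative only):
//
//   var button = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", function () {
//     // Invoked on click or when the "E" key is pressed.
//   });
//   VRSamplesUtil.addInfo("Ready to present.", 3000);
//   VRSamplesUtil.removeButton(button);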

Binary file added (788 B); not shown.

Binary file added (51 KiB); not shown.


@@ -0,0 +1,312 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>05 - Room Scale</title>
<!--
This sample demonstrates how to create scenes that align with the space
physically available to the user (when that information is available).
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-island.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var PLAYER_HEIGHT = 1.65;
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeIsland = null;
var stats = null;
var debugGeom = null;
function initWebGL (preserveDrawingBuffer) {
var glAttribs = {
alpha: false,
antialias: false, //!VRSamplesUtil.isMobile(),
preserveDrawingBuffer: false //preserveDrawingBuffer
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
// If the VRDisplay doesn't have stageParameters we won't know
// how big the user's play space is. Construct a scene around a
// default space size like 2 meters by 2 meters as a placeholder.
cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
stats = new WGLUStats(gl);
debugGeom = new WGLUDebugGeometry(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
initWebGL(true);
if (vrDisplay.stageParameters &&
vrDisplay.stageParameters.sizeX > 0 &&
vrDisplay.stageParameters.sizeZ > 0) {
// If we have stageParameters with a valid size use that to resize
// our scene to match the user's available space more closely. The
// check for size > 0 is necessary because some devices, like the
// Oculus Rift, can give you a standing space coordinate system but
// don't have a configured play area. These devices will report a
// stage size of 0.
cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
} else {
if (vrDisplay.stageParameters) {
VRSamplesUtil.addInfo("VRDisplay reported stageParameters, but stage size was 0. Using default size.", 3000);
} else {
VRSamplesUtil.addInfo("VRDisplay did not report stageParameters", 3000);
}
}
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
// Get a matrix for the pose that takes into account the stageParameters
// if we have them, and otherwise adjusts the position to ensure we're
// not stuck in the floor.
function getStandingViewMatrix (out, view) {
if (vrDisplay.stageParameters) {
// If the headset provides stageParameters use the
// sittingToStandingTransform to transform the view matrix into a
// space where the floor in the center of the user's play space is
// the origin.
mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
mat4.multiply(out, view, out);
} else {
// Otherwise you'll want to translate the view to compensate for the
// scene floor being at Y=0. Ideally this should match the user's
// height (you may want to make it configurable). For this demo we'll
// just assume all human beings are 1.65 meters (~5.4ft) tall.
mat4.identity(out);
mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
mat4.invert(out, out);
mat4.multiply(out, view, out);
}
}
function renderSceneView (projection, view, pose) {
cubeIsland.render(projection, view, stats);
// For fun, draw a blue cube where the player's head would have been if
// we weren't taking the stageParameters into account. It'll start in
// the center of the floor.
var orientation = pose.orientation;
var position = pose.position;
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!position) { position = [0, 0, 0]; }
debugGeom.bind(projection, view);
debugGeom.drawCube(orientation, position, 0.2, [0, 0, 1, 1]);
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
renderSceneView(frameData.leftProjectionMatrix, viewMat, frameData.pose);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
renderSceneView(frameData.rightProjectionMatrix, viewMat, frameData.pose);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
renderSceneView(projectionMat, viewMat, frameData.pose);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
mat4.translate(viewMat, viewMat, [0, -PLAYER_HEIGHT, 0]);
cubeIsland.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>


@@ -0,0 +1,262 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>04 - Simple Mirroring</title>
<!--
This sample demonstrates how to mirror content to an external display
while presenting to a VRDisplay.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeSea = null;
var stats = null;
function initWebGL (preserveDrawingBuffer) {
// Setting preserveDrawingBuffer to true prevents the canvas from being
// implicitly cleared when calling submitFrame or compositing the canvas
// on the document. For the simplest form of mirroring we want to create
// the canvas with that option enabled. Note that this may incur a
// performance penalty, as it may imply that additional copies of the
// canvas backbuffer need to be made. As a result, we ONLY want to set
// that if we know the VRDisplay has an external display, which is why
// we defer WebGL initialization until after we've gotten results back
// from navigator.vr.getDisplays and know which device we'll be
// presenting with.
var glAttribs = {
alpha: false,
antialias: false, //!VRSamplesUtil.isMobile(),
preserveDrawingBuffer: false //preserveDrawingBuffer
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
stats = new WGLUStats(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
// Only use preserveDrawingBuffer if we have an external display to
// mirror to.
initWebGL(vrDisplay.capabilities.hasExternalDisplay);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
// No VR means no mirroring, so create WebGL content without
// preserveDrawingBuffer
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
// recommended by the VRDisplay, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>


@@ -0,0 +1,307 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>03 - VR Presentation</title>
<!--
This sample demonstrates how to present the contents of a WebGL canvas to
a VRDisplay. The content is not mirrored on the main display while being
presented.
-->
<style>
#webgl-canvas, #presenting-message {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#presenting-message {
color: white;
font-family: sans-serif;
font-size: 2em;
font-weight: bold;
z-index: 1;
text-align: center;
padding: 0.5em;
background-color: #444;
display: none;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from an older version of the WebVR API. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<div id="presenting-message">Put on your headset now</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var glAttribs = {
alpha: false,
antialias: false // was: !VRSamplesUtil.isMobile()
};
var gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
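// Fall back to the legacy "experimental-webgl" context name for
// older browsers.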
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
var cubeSea = new VRCubeSea(gl, texture);
var stats = new WGLUStats(gl);
var presentingMessage = document.getElementById("presenting-message");
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
// This can only be called in response to a user gesture.
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
// onVRPresentChange resizes the canvas and updates the UI for
// presentation, so invoke it directly here.
onVRPresentChange();
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
// No sense in exiting presentation if we're not actually presenting.
// (This may happen if we get an event like vrdisplaydeactivate when
// we weren't presenting.)
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
// Nothing to do because we're handling things in onVRPresentChange.
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
// When we begin or end presenting, the canvas should be resized to the
// recommended dimensions for the display.
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
// Because we're not mirroring, anything on an external screen will
// freeze while presenting. It's better to replace it with a message
// indicating that content is being shown on the VRDisplay.
presentingMessage.style.display = "block";
// On devices with an external display the UA may not provide a way
// to exit VR presentation mode, so we should provide one ourselves.
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
// If we have an external display take down the presenting message and
// change the button back to "Enter VR".
if (vrDisplay.capabilities.hasExternalDisplay) {
presentingMessage.style.display = "";
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
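// Allocate a single VRFrameData up front and reuse it every frame
// to avoid allocating during the render loop.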
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
// It's highly recommended that you set the near and far planes to
// something appropriate for your scene so the projection matrices
// WebVR produces have a well-scaled depth buffer.
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
// Generally, you want to wait until VR support is confirmed and
// you know the user has a VRDisplay capable of presenting connected
// before adding UI that advertises VR features.
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
// The UA may kick us out of VR present mode for any reason, so to
// ensure we always know when we begin/end presenting we need to
// listen for the display's presentchange events.
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
// These events fire when the user agent has had some indication that
// it would be appropriate to enter or exit VR presentation mode, such
// as the user putting on a headset and triggering a proximity sensor.
// You can inspect the `reason` property of the event to learn why the
// event was fired. Listeners like the ones below would always trust
// the event and enter or exit VR presentation mode when asked; they
// are left disabled in this sample.
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
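// For example, a hypothetical listener that only auto-enters VR when
// the event fired because the headset was put on ('mounted' is one
// of the VRDisplayEventReason values in the WebVR 1.1 spec):
//vrDisplay.addEventListener('activate', function (event) {
//  if (event.reason === 'mounted')
//    onVRRequestPresent();
//}, false);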
} else {
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
// recommended by the VRDevice, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
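// A sketch of exact per-eye sizing instead (hypothetical; the
// gl.viewport calls in onAnimationFrame would need to change to
// match):
//webglCanvas.width = leftEye.renderWidth + rightEye.renderWidth;
//webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
//gl.viewport(0, 0, leftEye.renderWidth, leftEye.renderHeight);
//gl.viewport(leftEye.renderWidth, 0, rightEye.renderWidth, rightEye.renderHeight);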
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
window.addEventListener("resize", onResize, false);
onResize();
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
// When presenting content to the VRDisplay we want to update at its
// refresh rate if it differs from the refresh rate of the main
// display. Calling VRDisplay.requestAnimationFrame ensures we render
// at the right speed for VR.
vrDisplay.requestAnimationFrame(onAnimationFrame);
// As a general rule you want to get the pose as late as possible
// and call VRDisplay.submitFrame as early as possible after
// retrieving the pose. Do any work for the frame that doesn't need
// to know the pose earlier to ensure the lowest latency possible.
//var pose = vrDisplay.getPose();
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
// When presenting render a stereo view.
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
// If we're currently presenting to the VRDisplay we need to
// explicitly indicate we're done rendering.
vrDisplay.submitFrame();
} else {
// When not presenting render a mono view that still takes pose into
// account.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
// It's best to use our own projection matrix here, but we can reuse
// the left eye's view matrix so the scene still tracks the head pose.
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
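// Kick off the render loop with the window's requestAnimationFrame;
// once a VRDisplay is found, onAnimationFrame re-registers itself
// with the display's requestAnimationFrame instead.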
window.requestAnimationFrame(onAnimationFrame);
})();
</script>
</body>
</html>