WebVR API Implementation, r=larsbergstrom

Imanol Fernandez 2016-12-16 18:39:35 +01:00
parent 13826970c4
commit c5705bff50
70 changed files with 13044 additions and 20 deletions


@@ -0,0 +1,320 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>07 - Advanced Mirroring</title>
<!--
This sample demonstrates how to display a different view of the scene on
an external monitor than the one being viewed through the headset.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-island.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var PLAYER_HEIGHT = 1.65;
var vrDisplay = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var poseMat = mat4.create();
var tmpMat = mat4.create();
var vrPresentButton = null;
var orientation = [0, 0, 0, 1];
var position = [0, 0, 0];
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeIsland = null;
var stats = null;
var debugGeom = null;
function initWebGL () {
var glAttribs = {
alpha: false,
antialias: false //!VRSamplesUtil.isMobile()
// When doing mirroring like this, do NOT turn on PreserveDrawingBuffer!
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
// Using cubeIsland for this sample because it's easier to see from a
// third person view.
cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
stats = new WGLUStats(gl);
debugGeom = new WGLUDebugGeometry(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
var frameData;
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
initWebGL();
if (vrDisplay.stageParameters &&
vrDisplay.stageParameters.sizeX > 0 &&
vrDisplay.stageParameters.sizeZ > 0) {
cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
}
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
} else {
initWebGL();
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL();
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL();
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
function getStandingViewMatrix (out, view) {
if (vrDisplay.stageParameters) {
mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
mat4.multiply(out, view, out);
} else {
mat4.identity(out);
mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
mat4.invert(out, out);
mat4.multiply(out, view, out);
}
}
function getPoseMatrix (out, pose) {
orientation = pose.orientation;
position = pose.position;
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!position) { position = [0, 0, 0]; }
mat4.fromRotationTranslation(tmpMat, orientation, position);
mat4.invert(tmpMat, tmpMat);
getStandingViewMatrix(out, tmpMat);
mat4.invert(out, out);
}
function renderSceneThirdPersonView (pose) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
// Set up the camera in the back left corner of the island
mat4.identity(viewMat);
mat4.translate(viewMat, viewMat, [-2, 2.5, 2]);
mat4.rotateY(viewMat, viewMat, Math.PI * -0.25);
mat4.rotateX(viewMat, viewMat, Math.PI * -0.15);
mat4.invert(viewMat, viewMat);
cubeIsland.render(projectionMat, viewMat, stats);
// Render a debug view of the headset's position
if (pose) {
getPoseMatrix(poseMat, pose);
mat4.getTranslation(position, poseMat);
mat4.getRotation(orientation, poseMat);
debugGeom.bind(projectionMat, viewMat);
debugGeom.drawCube(orientation, position, 0.2, [0, 1, 0, 1]);
}
stats.renderOrtho();
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if(vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
cubeIsland.render(frameData.leftProjectionMatrix, viewMat, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
cubeIsland.render(frameData.rightProjectionMatrix, viewMat, stats);
// VRDisplay.submitFrame
vrDisplay.submitFrame();
// If we have an external display we can render a different version
// of the scene entirely after calling submitFrame and it will be
// shown on the page. Depending on the content this can be expensive
// so this technique should only be used when it will not interfere
// with the performance of the VR rendering.
if (vrDisplay.capabilities.hasExternalDisplay) {
renderSceneThirdPersonView(frameData.pose);
}
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
cubeIsland.render(projectionMat, viewMat, stats);
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
renderSceneThirdPersonView(null);
}
stats.end();
}
})();
</script>
</body>
</html>
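
The mirroring technique above boils down to a small pattern: render both eyes, call submitFrame(), and only then draw the page-facing view. A minimal sketch, assuming vrDisplay, frameData, and renderSceneThirdPersonView are set up as in the sample (renderEyeViews is a hypothetical stand-in for the per-eye viewport and render calls above):

function onAnimationFrame (t) {
  vrDisplay.requestAnimationFrame(onAnimationFrame);
  vrDisplay.getFrameData(frameData);
  if (vrDisplay.isPresenting) {
    renderEyeViews(frameData); // left/right viewports, as in the sample
    vrDisplay.submitFrame();
    // Anything drawn after submitFrame ends up on the page, not in the
    // headset, so the third-person view is free to diverge from the HMD view.
    if (vrDisplay.capabilities.hasExternalDisplay) {
      renderSceneThirdPersonView(frameData.pose);
    }
  }
}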


@@ -0,0 +1,312 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>08 - Dynamic Resolution</title>
<!--
This sample demonstrates how to efficiently adjust the resolution of your
WebVR scene on the fly using the layer bounds. Based on sample 4b.
-->
<style>
body {
background-color: black;
}
#canvas-clip, #webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#canvas-clip.presenting {
overflow: hidden;
bottom: 0;
right: 0;
margin: auto;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<div id="canvas-clip">
<canvas id="webgl-canvas"></canvas>
</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var canvasClip = document.getElementById("canvas-clip");
var gl = null;
var cubeSea = null;
var stats = null;
function initWebGL (preserveDrawingBuffer) {
var glAttribs = {
alpha: false,
antialias: false, //!VRSamplesUtil.isMobile(),
preserveDrawingBuffer: false //preserveDrawingBuffer
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
stats = new WGLUStats(gl);
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
resolutionMultiplier = 1.0;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
canvasClip.classList.add("presenting");
var leftEye = vrDisplay.getEyeParameters("left");
canvasClip.style.width = (leftEye.renderWidth/2) + "px";
canvasClip.style.height = (leftEye.renderHeight/2) + "px";
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
canvasClip.classList.remove("presenting");
canvasClip.style.width = "";
canvasClip.style.height = "";
webglCanvas.style.width = "";
webglCanvas.style.height = "";
}
}
// Make sure the canvas is resized AFTER we've updated the container div.
onResize();
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
initWebGL(vrDisplay.capabilities.hasExternalDisplay);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
// How large our frame should be in relation to the recommended render
// target size.
var resolutionMultiplier = 1.0;
var eyeWidth, eyeHeight;
var lastAdjustment = 0;
function adjustResolution(t) {
// Update the resolution at most once every 100ms
if (t - lastAdjustment < 100)
return;
lastAdjustment = t;
// Modify the resolution we are rendering at over time on a sin wave.
// In the real world this would probably be based on scene complexity.
// Oscillates between 0.5 and 1.0.
resolutionMultiplier = (Math.sin(t / 1000) * 0.25) + 0.75;
eyeWidth = webglCanvas.width * 0.5 * resolutionMultiplier;
eyeHeight = webglCanvas.height * resolutionMultiplier;
// Layer bounds are described in UV space, so 0.0 to 1.0
var boundsWidth = 0.5 * resolutionMultiplier;
var boundsHeight = resolutionMultiplier;
// Tell the presenting display about the new texture bounds. This
// ensures it only picks up the parts of the texture we're going to be
// rendering to and avoids the need to resize the WebGL canvas, which
// can be a slow operation. Because we're already presenting when we
// call requestPresent again it only updates the VRLayer information and
// doesn't require a user gesture.
vrDisplay.requestPresent([{
source: webglCanvas,
leftBounds: [0.0, 0.0, boundsWidth, boundsHeight],
rightBounds: [boundsWidth, 0.0, boundsWidth, boundsHeight],
}]);
// To ensure our mirrored content also shows up correctly we'll scale the
// canvas display size appropriately so that it continues to show only one
// eye.
webglCanvas.style.width = (1.0/resolutionMultiplier) * 200 + "%";
webglCanvas.style.height = (1.0/resolutionMultiplier) * 100 + "%";
//webglCanvas.style.marginTop = ((eyeHeight - webglCanvas.height)* resolutionMultiplier) + "px";
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
adjustResolution(t);
// Note that the viewports use the eyeWidth/height rather than the
// canvas width and height.
gl.viewport(0, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(eyeWidth, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>
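
The dynamic-resolution trick above hinges on one detail: calling requestPresent() on an already-presenting display only updates the VRLayer bounds, with no user gesture required. A minimal sketch of that bounds update, assuming a presenting vrDisplay and the sample's webglCanvas, with a multiplier in (0, 1]:

var m = 0.75; // render at 75% of the recommended render target size
vrDisplay.requestPresent([{
  source: webglCanvas,
  // Bounds are UV-space [x, y, width, height]; the left eye starts at 0,
  // the right eye at 0.5 * m.
  leftBounds:  [0.0,     0.0, 0.5 * m, m],
  rightBounds: [0.5 * m, 0.0, 0.5 * m, m],
}]);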

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,270 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
var WGLUDebugGeometry = (function() {
"use strict";
var debugGeomVS = [
"uniform mat4 projectionMat;",
"uniform mat4 viewMat;",
"uniform mat4 modelMat;",
"attribute vec3 position;",
"void main() {",
" gl_Position = projectionMat * viewMat * modelMat * vec4( position, 1.0 );",
"}",
].join("\n");
var debugGeomFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var DebugGeometry = function(gl) {
this.gl = gl;
this.projMat = mat4.create();
this.viewMat = mat4.create();
this.modelMat = mat4.create();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(debugGeomVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(debugGeomFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({ position: 0 });
this.program.link();
var verts = [];
var indices = [];
//
// Cube Geometry
//
this.cubeIndexOffset = indices.length;
var size = 0.5;
// Bottom
var idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, -size, +size);
verts.push(-size, -size, +size);
// Top
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, +size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
// Left
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(-size, +size, -size);
verts.push(-size, +size, +size);
verts.push(-size, -size, +size);
// Right
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(+size, -size, +size);
// Back
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(-size, +size, -size);
// Front
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, +size);
verts.push(+size, -size, +size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
this.cubeIndexCount = indices.length - this.cubeIndexOffset;
//
// Cone Geometry
//
this.coneIndexOffset = indices.length;
var size = 0.5;
var conePointVertex = verts.length / 3.0;
var coneBaseVertex = conePointVertex+1;
var coneSegments = 16;
// Point
verts.push(0, size, 0);
// Base Vertices
for (var i = 0; i < coneSegments; ++i) {
if (i > 0) {
idx = verts.length / 3.0;
indices.push(idx-1, conePointVertex, idx);
}
var rad = ((Math.PI * 2) / coneSegments) * i;
verts.push(Math.sin(rad) * (size / 2.0), -size, Math.cos(rad) * (size / 2.0));
}
// Last triangle to fill the gap
indices.push(idx, conePointVertex, coneBaseVertex);
// Base triangles
for (var i = 2; i < coneSegments; ++i) {
indices.push(coneBaseVertex, coneBaseVertex+(i-1), coneBaseVertex+i);
}
this.coneIndexCount = indices.length - this.coneIndexOffset;
//
// Rect geometry
//
this.rectIndexOffset = indices.length;
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2, idx+3, idx);
verts.push(0, 0, 0);
verts.push(1, 0, 0);
verts.push(1, 1, 0);
verts.push(0, 1, 0);
this.rectIndexCount = indices.length - this.rectIndexOffset;
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
DebugGeometry.prototype.bind = function(projectionMat, viewMat) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.viewMat, false, viewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 12, 0);
};
DebugGeometry.prototype.bindOrtho = function() {
mat4.ortho(this.projMat, 0, this.gl.canvas.width, this.gl.canvas.height, 0, 0.1, 1024);
mat4.identity(this.viewMat);
this.bind(this.projMat, this.viewMat);
};
DebugGeometry.prototype._bindUniforms = function(orientation, position, scale, color) {
if (!position) { position = [0, 0, 0]; }
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!scale) { scale = [1, 1, 1]; }
if (!color) { color = [1, 0, 0, 1]; }
mat4.fromRotationTranslationScale(this.modelMat, orientation, position, scale);
this.gl.uniformMatrix4fv(this.program.uniform.modelMat, false, this.modelMat);
this.gl.uniform4fv(this.program.uniform.color, color);
};
DebugGeometry.prototype.drawCube = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBox = function(orientation, position, scale, color) {
var gl = this.gl;
this._bindUniforms(orientation, position, scale, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBoxWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawRect = function(x, y, width, height, color) {
var gl = this.gl;
this._bindUniforms(null, [x, y, -1], [width, height, 1], color);
gl.drawElements(gl.LINE_STRIP, this.rectIndexCount, gl.UNSIGNED_SHORT, this.rectIndexOffset * 2.0);
};
DebugGeometry.prototype.drawCone = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
DebugGeometry.prototype.drawConeWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
return DebugGeometry;
})();
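
Typical use of this helper, as seen in sample 07 above; a short sketch assuming a WebGL context gl and projection/view matrices are already available:

var debugGeom = new WGLUDebugGeometry(gl);
debugGeom.bind(projectionMat, viewMat);
// A 0.2-unit green cube at the origin with identity orientation.
debugGeom.drawCube([0, 0, 0, 1], [0, 0, 0], 0.2, [0, 1, 0, 1]);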


@@ -0,0 +1,162 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Caches specified GL state, runs a callback, and restores the cached state when
done.
Example usage:
var savedState = [
gl.ARRAY_BUFFER_BINDING,
// TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
gl.CLEAR_COLOR,
];
// After this call the array buffer, texture unit 0, active texture, and clear
// color will be restored. The viewport will remain changed, however, because
// gl.VIEWPORT was not included in the savedState list.
WGLUPreserveGLState(gl, savedState, function(gl) {
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, ....);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, ...);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
});
Note that this is not intended to be fast. Managing state in your own code to
avoid redundant state setting and querying will always be faster. This function
is most useful for cases where you may not have full control over the WebGL
calls being made, such as tooling or effect injectors.
*/
function WGLUPreserveGLState(gl, bindings, callback) {
if (!bindings) {
callback(gl);
return;
}
var boundValues = [];
var activeTexture = null;
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
switch (binding) {
case gl.TEXTURE_BINDING_2D:
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
boundValues.push(null, null);
break;
}
if (!activeTexture) {
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
}
gl.activeTexture(textureUnit);
boundValues.push(gl.getParameter(binding), null);
break;
case gl.ACTIVE_TEXTURE:
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
boundValues.push(null);
break;
default:
boundValues.push(gl.getParameter(binding));
break;
}
}
callback(gl);
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
var boundValue = boundValues[i];
switch (binding) {
case gl.ACTIVE_TEXTURE:
break; // Ignore this binding, since we special-case it to happen last.
case gl.ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
break;
case gl.COLOR_CLEAR_VALUE:
gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.COLOR_WRITEMASK:
gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.CURRENT_PROGRAM:
gl.useProgram(boundValue);
break;
case gl.ELEMENT_ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
break;
case gl.FRAMEBUFFER_BINDING:
gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
break;
case gl.RENDERBUFFER_BINDING:
gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
break;
case gl.TEXTURE_BINDING_2D:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, boundValue);
break;
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
break;
case gl.VIEWPORT:
gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.BLEND:
case gl.CULL_FACE:
case gl.DEPTH_TEST:
case gl.SCISSOR_TEST:
case gl.STENCIL_TEST:
if (boundValue) {
gl.enable(binding);
} else {
gl.disable(binding);
}
break;
default:
console.log("No GL restore behavior for 0x" + binding.toString(16));
break;
}
if (activeTexture) {
gl.activeTexture(activeTexture);
}
}
}
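
The enable/disable cases at the end of the restore switch mean boolean capabilities can be preserved alongside bindings. A small sketch complementing the header example, assuming a valid context gl:

WGLUPreserveGLState(gl, [gl.DEPTH_TEST, gl.VIEWPORT], function(gl) {
  gl.disable(gl.DEPTH_TEST);
  gl.viewport(0, 0, 64, 64);
  // ... draw a small overlay here ...
});
// Both DEPTH_TEST and the viewport are restored at this point.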


@@ -0,0 +1,179 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Utility class to make loading shader programs easier. Does all the error
checking you typically want, automatically queries uniform and attribute
locations, and attempts to take advantage of some browsers' ability to link
asynchronously by not querying any information from the program until its
first use.
*/
var WGLUProgram = (function() {
"use strict";
// Attempts to allow the browser to asynchronously compile and link
var Program = function(gl) {
this.gl = gl;
this.program = gl.createProgram();
this.attrib = null;
this.uniform = null;
this._firstUse = true;
this._vertexShader = null;
this._fragmentShader = null;
}
Program.prototype.attachShaderSource = function(source, type) {
var gl = this.gl;
var shader;
switch (type) {
case gl.VERTEX_SHADER:
this._vertexShader = gl.createShader(type);
shader = this._vertexShader;
break;
case gl.FRAGMENT_SHADER:
this._fragmentShader = gl.createShader(type);
shader = this._fragmentShader;
break;
default:
console.error("Invalid Shader Type:", type);
return;
}
gl.attachShader(this.program, shader);
gl.shaderSource(shader, source);
gl.compileShader(shader);
}
Program.prototype.attachShaderSourceFromXHR = function(url, type) {
var self = this;
return new Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.addEventListener("load", function (ev) {
if (xhr.status == 200) {
self.attachShaderSource(xhr.response, type);
resolve();
} else {
reject(xhr.statusText);
}
}, false);
xhr.open("GET", url, true);
xhr.send(null);
});
}
Program.prototype.attachShaderSourceFromTag = function(tagId, type) {
var shaderTag = document.getElementById(tagId);
if (!shaderTag) {
console.error("Shader source tag not found:", tagId);
return;
}
if (!type) {
if (shaderTag.type == "x-shader/x-vertex") {
type = this.gl.VERTEX_SHADER;
} else if (shaderTag.type == "x-shader/x-fragment") {
type = this.gl.FRAGMENT_SHADER;
} else {
console.error("Invalid Shader Type:", shaderTag.type);
return;
}
}
var src = "";
var k = shaderTag.firstChild;
while (k) {
if (k.nodeType == 3) {
src += k.textContent;
}
k = k.nextSibling;
}
this.attachShaderSource(src, type);
}
Program.prototype.bindAttribLocation = function(attribLocationMap) {
var gl = this.gl;
if (attribLocationMap) {
this.attrib = {};
for (var attribName in attribLocationMap) {
gl.bindAttribLocation(this.program, attribLocationMap[attribName], attribName);
this.attrib[attribName] = attribLocationMap[attribName];
}
}
}
Program.prototype.transformFeedbackVaryings = function(varyings, type) {
this.gl.transformFeedbackVaryings(this.program, varyings, type);
}
Program.prototype.link = function() {
this.gl.linkProgram(this.program);
}
Program.prototype.use = function() {
var gl = this.gl;
// If this is the first time the program has been used do all the error checking and
// attrib/uniform querying needed.
if (this._firstUse) {
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
if (this._vertexShader && !gl.getShaderParameter(this._vertexShader, gl.COMPILE_STATUS)) {
console.error("Vertex shader compile error:", gl.getShaderInfoLog(this._vertexShader));
} else if (this._fragmentShader && !gl.getShaderParameter(this._fragmentShader, gl.COMPILE_STATUS)) {
console.error("Fragment shader compile error:", gl.getShaderInfoLog(this._fragmentShader));
} else {
console.error("Program link error:", gl.getProgramInfoLog(this.program));
}
gl.deleteProgram(this.program);
this.program = null;
} else {
if (!this.attrib) {
this.attrib = {};
var attribCount = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
for (var i = 0; i < attribCount; i++) {
var attribInfo = gl.getActiveAttrib(this.program, i);
this.attrib[attribInfo.name] = gl.getAttribLocation(this.program, attribInfo.name);
}
}
this.uniform = {};
var uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(this.program, i);
uniformName = uniformInfo.name.replace("[0]", "");
this.uniform[uniformName] = gl.getUniformLocation(this.program, uniformName);
}
}
gl.deleteShader(this._vertexShader);
gl.deleteShader(this._fragmentShader);
this._firstUse = false;
}
gl.useProgram(this.program);
}
return Program;
})();
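
A minimal sketch of the intended flow, mirroring how wglu-debug-geometry.js uses this class; the shader sources and the projectionMat uniform here are illustrative assumptions:

var program = new WGLUProgram(gl);
program.attachShaderSource(vertexSrc, gl.VERTEX_SHADER);     // hypothetical sources
program.attachShaderSource(fragmentSrc, gl.FRAGMENT_SHADER);
program.bindAttribLocation({ position: 0 });
program.link();
// Link/compile status is only checked on the first use(), which is what lets
// browsers that support it link in the background.
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);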


@@ -0,0 +1,649 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Heavily inspired by Mr. Doob's stats.js, this FPS counter is rendered completely
with WebGL, allowing it to be shown in cases where overlaid HTML elements aren't
usable (like WebVR), or if you want the FPS counter to be rendered as part of
your scene.
See stats-test.html for basic usage.
*/
var WGLUStats = (function() {
"use strict";
//--------------------
// glMatrix functions
//--------------------
// These functions have been copied here from glMatrix (glmatrix.net) to allow
// this file to run standalone.
var mat4_identity = function(out) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
var mat4_multiply = function (out, a, b) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
// Cache only the current line of the second matrix
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
return out;
};
var mat4_fromTranslation = function(out, v) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
var mat4_ortho = function (out, left, right, bottom, top, near, far) {
var lr = 1 / (left - right),
bt = 1 / (bottom - top),
nf = 1 / (near - far);
out[0] = -2 * lr;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = -2 * bt;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 2 * nf;
out[11] = 0;
out[12] = (left + right) * lr;
out[13] = (top + bottom) * bt;
out[14] = (far + near) * nf;
out[15] = 1;
return out;
};
var mat4_translate = function (out, a, v) {
var x = v[0], y = v[1], z = v[2],
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
if (a === out) {
out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
} else {
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
out[12] = a00 * x + a10 * y + a20 * z + a[12];
out[13] = a01 * x + a11 * y + a21 * z + a[13];
out[14] = a02 * x + a12 * y + a22 * z + a[14];
out[15] = a03 * x + a13 * y + a23 * z + a[15];
}
return out;
};
var mat4_scale = function(out, a, v) {
var x = v[0], y = v[1], z = v[2];
out[0] = a[0] * x;
out[1] = a[1] * x;
out[2] = a[2] * x;
out[3] = a[3] * x;
out[4] = a[4] * y;
out[5] = a[5] * y;
out[6] = a[6] * y;
out[7] = a[7] * y;
out[8] = a[8] * z;
out[9] = a[9] * z;
out[10] = a[10] * z;
out[11] = a[11] * z;
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
//-------------------
// Utility functions
//-------------------
function linkProgram(gl, vertexSource, fragmentSource, attribLocationMap) {
// No error checking for brevity.
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexSource);
gl.compileShader(vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentSource);
gl.compileShader(fragmentShader);
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
for (var attribName in attribLocationMap)
gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
gl.linkProgram(program);
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
return program;
}
function getProgramUniforms(gl, program) {
var uniforms = {};
var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(program, i);
uniformName = uniformInfo.name.replace("[0]", "");
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
return uniforms;
}
//----------------------------
// Seven-segment text display
//----------------------------
var sevenSegmentVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec2 position;",
"void main() {",
" gl_Position = projectionMat * modelViewMat * vec4( position, 0.0, 1.0 );",
"}",
].join("\n");
var sevenSegmentFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var SevenSegmentText = function (gl) {
this.gl = gl;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, sevenSegmentVS, sevenSegmentFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var verts = [];
var segmentIndices = {};
var indices = [];
var width = 0.5;
var thickness = 0.25;
this.kerning = 2.0;
this.matrix = new Float32Array(16);
function defineSegment(id, left, top, right, bottom) {
var idx = verts.length / 2;
verts.push(
left, top,
right, top,
right, bottom,
left, bottom);
segmentIndices[id] = [
idx, idx+2, idx+1,
idx, idx+3, idx+2];
}
var characters = {};
this.characters = characters;
function defineCharacter(c, segments) {
var character = {
character: c,
offset: indices.length * 2,
count: 0
};
for (var i = 0; i < segments.length; ++i) {
var idx = segments[i];
var segment = segmentIndices[idx];
character.count += segment.length;
indices.push.apply(indices, segment);
}
characters[c] = character;
}
/* Segment layout is as follows:
|-0-|
3   4
|-1-|
5   6
|-2-|
*/
defineSegment(0, -1, 1, width, 1-thickness);
defineSegment(1, -1, thickness*0.5, width, -thickness*0.5);
defineSegment(2, -1, -1+thickness, width, -1);
defineSegment(3, -1, 1, -1+thickness, -thickness*0.5);
defineSegment(4, width-thickness, 1, width, -thickness*0.5);
defineSegment(5, -1, thickness*0.5, -1+thickness, -1);
defineSegment(6, width-thickness, thickness*0.5, width, -1);
defineCharacter("0", [0, 2, 3, 4, 5, 6]);
defineCharacter("1", [4, 6]);
defineCharacter("2", [0, 1, 2, 4, 5]);
defineCharacter("3", [0, 1, 2, 4, 6]);
defineCharacter("4", [1, 3, 4, 6]);
defineCharacter("5", [0, 1, 2, 3, 6]);
defineCharacter("6", [0, 1, 2, 3, 5, 6]);
defineCharacter("7", [0, 4, 6]);
defineCharacter("8", [0, 1, 2, 3, 4, 5, 6]);
defineCharacter("9", [0, 1, 2, 3, 4, 6]);
defineCharacter("A", [0, 1, 3, 4, 5, 6]);
defineCharacter("B", [1, 2, 3, 5, 6]);
defineCharacter("C", [0, 2, 3, 5]);
defineCharacter("D", [1, 2, 4, 5, 6]);
defineCharacter("E", [0, 1, 2, 4, 6]);
defineCharacter("F", [0, 1, 3, 5]);
defineCharacter("P", [0, 1, 3, 4, 5]);
defineCharacter("-", [1]);
defineCharacter(" ", []);
defineCharacter("_", [2]); // Used for undefined characters
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.DYNAMIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
SevenSegmentText.prototype.render = function(projectionMat, modelViewMat, text, r, g, b, a) {
var gl = this.gl;
if (r == undefined || g == undefined || b == undefined) {
r = 0.0;
g = 1.0;
b = 0.0;
}
if (a == undefined)
a = 1.0;
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniform4f(this.uniforms.color, r, g, b, a);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(this.attribs.position);
gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
text = text.toUpperCase();
var offset = 0;
for (var i = 0; i < text.length; ++i) {
var c;
if (text[i] in this.characters) {
c = this.characters[text[i]];
} else {
c = this.characters["_"];
}
if (c.count != 0) {
mat4_fromTranslation(this.matrix, [offset, 0, 0]);
mat4_multiply(this.matrix, modelViewMat, this.matrix);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, this.matrix);
gl.drawElements(gl.TRIANGLES, c.count, gl.UNSIGNED_SHORT, c.offset);
}
offset += this.kerning;
}
}
//-----------
// FPS Graph
//-----------
var statsVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec3 color;",
"varying vec4 vColor;",
"void main() {",
" vColor = vec4(color, 1.0);",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var statsFS = [
"precision mediump float;",
"varying vec4 vColor;",
"void main() {",
" gl_FragColor = vColor;",
"}",
].join("\n");
var segments = 30;
var maxFPS = 90;
function segmentToX(i) {
return ((0.9/segments) * i) - 0.45;
}
function fpsToY(value) {
return (Math.min(value, maxFPS) * (0.7 / maxFPS)) - 0.45;
}
function fpsToRGB(value) {
return {
r: Math.max(0.0, Math.min(1.0, 1.0 - (value/60))),
g: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15)))),
b: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15))))
};
}
var now = /*( performance && performance.now ) ? performance.now.bind( performance ) :*/ Date.now;
var Stats = function(gl) {
this.gl = gl;
this.sevenSegmentText = new SevenSegmentText(gl);
this.startTime = now();
this.prevTime = this.startTime;
this.frames = 0;
this.fps = 0;
this.orthoProjMatrix = new Float32Array(16);
this.orthoViewMatrix = new Float32Array(16);
this.modelViewMatrix = new Float32Array(16);
// Hard coded because it doesn't change:
// Scale by 0.075 in X and Y
// Translate into upper left corner w/ z = 0.02
this.textMatrix = new Float32Array([
0.075, 0, 0, 0,
0, 0.075, 0, 0,
0, 0, 1, 0,
-0.3625, 0.3625, 0.02, 1
]);
this.lastSegment = 0;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, statsVS, statsFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var fpsVerts = [];
var fpsIndices = [];
// Graph geometry
for (var i = 0; i < segments; ++i) {
// Bar top
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
// Bar bottom
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
var idx = i * 4;
fpsIndices.push(idx, idx+3, idx+1,
idx+3, idx, idx+2);
}
function addBGSquare(left, bottom, right, top, z, r, g, b) {
var idx = fpsVerts.length / 6;
fpsVerts.push(left, bottom, z, r, g, b);
fpsVerts.push(right, top, z, r, g, b);
fpsVerts.push(left, top, z, r, g, b);
fpsVerts.push(right, bottom, z, r, g, b);
fpsIndices.push(idx, idx+1, idx+2,
idx, idx+3, idx+1);
};
// Panel Background
addBGSquare(-0.5, -0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.125);
// FPS Background
addBGSquare(-0.45, -0.45, 0.45, 0.25, 0.01, 0.0, 0.0, 0.4);
// 30 FPS line
addBGSquare(-0.45, fpsToY(30), 0.45, fpsToY(32), 0.015, 0.5, 0.0, 0.5);
// 60 FPS line
addBGSquare(-0.45, fpsToY(60), 0.45, fpsToY(62), 0.015, 0.2, 0.0, 0.75);
this.fpsVertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fpsVerts), gl.DYNAMIC_DRAW);
this.fpsIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(fpsIndices), gl.STATIC_DRAW);
this.fpsIndexCount = fpsIndices.length;
};
Stats.prototype.begin = function() {
this.startTime = now();
};
Stats.prototype.end = function() {
var time = now();
this.frames++;
if (time > this.prevTime + 250) {
this.fps = Math.round((this.frames * 1000) / (time - this.prevTime));
this.updateGraph(this.fps);
this.prevTime = time;
this.frames = 0;
}
};
Stats.prototype.updateGraph = function(value) {
var gl = this.gl;
var color = fpsToRGB(value);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
// Update the current segment with the new FPS value
var updateVerts = [
segmentToX(this.lastSegment), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
];
// Re-shape the next segment into the green "progress" line
color.r = 0.2;
color.g = 1.0;
color.b = 0.2;
if (this.lastSegment == segments - 1) {
// If we're updating the last segment we need to do two bufferSubDatas
// to update the segment and turn the first segment into the progress line.
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
updateVerts = [
segmentToX(0), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(0), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(0), 0.02, color.r, color.g, color.b
];
gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(updateVerts));
} else {
updateVerts.push(
segmentToX(this.lastSegment+1), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(0), 0.02, color.r, color.g, color.b
);
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
}
this.lastSegment = (this.lastSegment+1) % segments;
};
Stats.prototype.render = function(projectionMat, modelViewMat) {
var gl = this.gl;
// Render text first, minor win for early fragment discard
mat4_multiply(this.modelViewMatrix, modelViewMat, this.textMatrix);
// "FP5" rather than "FPS": the seven-segment character set defined above has
// no "S", so "5" stands in for it.
this.sevenSegmentText.render(projectionMat, this.modelViewMatrix, this.fps + " FP5");
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, modelViewMat);
gl.enableVertexAttribArray(this.attribs.position);
gl.enableVertexAttribArray(this.attribs.color);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.vertexAttribPointer(this.attribs.position, 3, gl.FLOAT, false, 24, 0);
gl.vertexAttribPointer(this.attribs.color, 3, gl.FLOAT, false, 24, 12);
// Draw the graph and background in a single call
gl.drawElements(gl.TRIANGLES, this.fpsIndexCount, gl.UNSIGNED_SHORT, 0);
}
Stats.prototype.renderOrtho = function(x, y, width, height) {
var canvas = this.gl.canvas;
if (x == undefined || y == undefined) {
x = 10 * window.devicePixelRatio;
y = 10 * window.devicePixelRatio;
}
if (width == undefined || height == undefined) {
width = 75 * window.devicePixelRatio;
height = 75 * window.devicePixelRatio;
}
mat4_ortho(this.orthoProjMatrix, 0, canvas.width, 0, canvas.height, 0.1, 1024);
mat4_identity(this.orthoViewMatrix);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [x, canvas.height - height - y, -1]);
mat4_scale(this.orthoViewMatrix, this.orthoViewMatrix, [width, height, 1]);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [0.5, 0.5, 0]);
this.render(this.orthoProjMatrix, this.orthoViewMatrix);
}
return Stats;
})();
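
Basic usage follows the pattern in the samples above; a sketch assuming a WebGL context gl and a hypothetical drawScene():

var stats = new WGLUStats(gl);
function onAnimationFrame (t) {
  stats.begin();
  window.requestAnimationFrame(onAnimationFrame);
  drawScene();
  stats.renderOrtho(); // 2D overlay in the corner of the canvas
  stats.end();
}
// Inside a VR scene, stats.render(projectionMat, modelViewMat) can be used
// instead to place the counter in the world.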


@@ -0,0 +1,687 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Handles loading of textures of multiple formats, and tries to be efficient
about it. Formats supported will vary by device. Use the .supports<format>()
functions to determine if a format is supported. Most of the time you can just
call loader.loadTexture("url"); and it will handle it based on the extension.
If the extension can't be relied on, use the corresponding
.load<Extension>("url") calls.
*/
var WGLUTextureLoader = (function() {
"use strict";
//============================//
// DXT constants and utilites //
//============================//
// Utility functions
// Builds a numeric code for a given fourCC string
function fourCCToInt32(value) {
return value.charCodeAt(0) +
(value.charCodeAt(1) << 8) +
(value.charCodeAt(2) << 16) +
(value.charCodeAt(3) << 24);
}
// Turns a fourCC numeric code into a string
function int32ToFourCC(value) {
return String.fromCharCode(
value & 0xff,
(value >> 8) & 0xff,
(value >> 16) & 0xff,
(value >> 24) & 0xff
);
}
// Calculates the size of a compressed texture level in bytes
function textureLevelSize(format, width, height) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGB_ETC1_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 8;
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 16;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
return Math.floor((Math.max(width, 8) * Math.max(height, 8) * 4 + 7) / 8);
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return Math.floor((Math.max(width, 16) * Math.max(height, 8) * 2 + 7) / 8);
default:
return 0;
}
}
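// Example: a 256x256 DXT1 level packs 4x4 texel blocks at 8 bytes each, so
// textureLevelSize(COMPRESSED_RGB_S3TC_DXT1_EXT, 256, 256) returns
// 64 * 64 * 8 = 32768 bytes.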
// DXT formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/
var COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0;
var COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1;
var COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2;
var COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3;
// ATC formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_atc/
var COMPRESSED_RGB_ATC_WEBGL = 0x8C92;
var COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL = 0x8C93;
var COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL = 0x87EE;
// DXT values and structures referenced from:
// http://msdn.microsoft.com/en-us/library/bb943991.aspx/
var DDS_MAGIC = 0x20534444;
var DDSD_MIPMAPCOUNT = 0x20000;
var DDPF_FOURCC = 0x4;
var DDS_HEADER_LENGTH = 31; // The header length in 32 bit ints.
// Offsets into the header array.
var DDS_HEADER_MAGIC = 0;
var DDS_HEADER_SIZE = 1;
var DDS_HEADER_FLAGS = 2;
var DDS_HEADER_HEIGHT = 3;
var DDS_HEADER_WIDTH = 4;
var DDS_HEADER_MIPMAPCOUNT = 7;
var DDS_HEADER_PF_FLAGS = 20;
var DDS_HEADER_PF_FOURCC = 21;
// FourCC format identifiers.
var FOURCC_DXT1 = fourCCToInt32("DXT1");
var FOURCC_DXT3 = fourCCToInt32("DXT3");
var FOURCC_DXT5 = fourCCToInt32("DXT5");
var FOURCC_ATC = fourCCToInt32("ATC ");
var FOURCC_ATCA = fourCCToInt32("ATCA");
var FOURCC_ATCI = fourCCToInt32("ATCI");
//==================//
// Crunch constants //
//==================//
// Taken from crnlib.h
var CRN_FORMAT = {
cCRNFmtInvalid: -1,
cCRNFmtDXT1: 0,
// cCRNFmtDXT3 is not currently supported when writing to CRN - only DDS.
cCRNFmtDXT3: 1,
cCRNFmtDXT5: 2
// Crunch supports more formats than this, but we can't use them here.
};
// Mapping of Crunch formats to DXT formats.
var DXT_FORMAT_MAP = {};
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT1] = COMPRESSED_RGB_S3TC_DXT1_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT3] = COMPRESSED_RGBA_S3TC_DXT3_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT5] = COMPRESSED_RGBA_S3TC_DXT5_EXT;
//===============//
// PVR constants //
//===============//
// PVR formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_pvrtc/
var COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00;
var COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01;
var COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02;
var COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03;
// ETC1 format, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/
var COMPRESSED_RGB_ETC1_WEBGL = 0x8D64;
var PVR_FORMAT_2BPP_RGB = 0;
var PVR_FORMAT_2BPP_RGBA = 1;
var PVR_FORMAT_4BPP_RGB = 2;
var PVR_FORMAT_4BPP_RGBA = 3;
var PVR_FORMAT_ETC1 = 6;
var PVR_FORMAT_DXT1 = 7;
var PVR_FORMAT_DXT3 = 9;
var PVR_FORMAT_DXT5 = 5;
var PVR_HEADER_LENGTH = 13; // The header length in 32 bit ints.
var PVR_MAGIC = 0x03525650; // Reads as 0x50565203 when the file's endianness is flipped.
// Offsets into the header array.
var PVR_HEADER_MAGIC = 0;
var PVR_HEADER_FORMAT = 2;
var PVR_HEADER_HEIGHT = 6;
var PVR_HEADER_WIDTH = 7;
var PVR_HEADER_MIPMAPCOUNT = 11;
var PVR_HEADER_METADATA = 12;
//============//
// Misc Utils //
//============//
// When an error occurs set the texture to a 1x1 black pixel
// This prevents WebGL errors from attempting to use unrenderable textures
// and clears out stale data if we're re-using a texture.
function clearOnError(gl, error, texture, callback) {
if (console) {
console.error(error);
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// Notify the user that an error occurred and the texture is ready.
if (callback) { callback(texture, error, null); }
}
function isPowerOfTwo(n) {
return (n & (n - 1)) === 0;
}
function getExtension(gl, name) {
var vendorPrefixes = ["", "WEBKIT_", "MOZ_"];
var ext = null;
for (var i = 0; i < vendorPrefixes.length; ++i) {
ext = gl.getExtension(vendorPrefixes[i] + name);
if (ext) { break; }
}
return ext;
}
//==================//
// DDS File Reading //
//==================//
// Parse a DDS file and provide information about the raw DXT data it contains to the given callback.
function parseDDS(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the DDS header.
var header = new Int32Array(arrayBuffer, 0, DDS_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid DDS file.
if(header[DDS_HEADER_MAGIC] != DDS_MAGIC) {
errorCallback("Invalid magic number in DDS header");
return 0;
}
if(!(header[DDS_HEADER_PF_FLAGS] & DDPF_FOURCC)) {
errorCallback("Unsupported format, must contain a FourCC code");
return 0;
}
// Determine what type of compressed data the file contains.
var fourCC = header[DDS_HEADER_PF_FOURCC];
var internalFormat;
switch(fourCC) {
case FOURCC_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case FOURCC_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case FOURCC_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
case FOURCC_ATC:
internalFormat = COMPRESSED_RGB_ATC_WEBGL;
break;
case FOURCC_ATCA:
internalFormat = COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL;
break;
case FOURCC_ATCI:
internalFormat = COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL;
break;
default:
errorCallback("Unsupported FourCC code: " + int32ToFourCC(fourCC));
return;
}
// Determine how many mipmap levels the file contains.
var levels = 1;
if(header[DDS_HEADER_FLAGS] & DDSD_MIPMAPCOUNT) {
levels = Math.max(1, header[DDS_HEADER_MIPMAPCOUNT]);
}
// Gather other basic metrics and a view of the raw DXT data.
var width = header[DDS_HEADER_WIDTH];
var height = header[DDS_HEADER_HEIGHT];
var dataOffset = header[DDS_HEADER_SIZE] + 4;
var dxtData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the DXT information to the callback for uploading.
callback(dxtData, width, height, levels, internalFormat);
}
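// Usage sketch (the arrayBuffer variable is illustrative): given a .dds file
// fetched as an ArrayBuffer, the callbacks receive either the raw DXT payload
// or an error message:
//   parseDDS(arrayBuffer, function (dxtData, width, height, levels, format) {
//     // upload with compressedTexImage2D, one call per mip level
//   }, function (error) { console.error(error); });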
//==================//
// PVR File Reading //
//==================//
// Parse a PVR file and provide information about the raw texture data it contains to the given callback.
function parsePVR(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the PVR header.
var header = new Int32Array(arrayBuffer, 0, PVR_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid PVR file.
if(header[PVR_HEADER_MAGIC] != PVR_MAGIC) {
errorCallback("Invalid magic number in PVR header");
return 0;
}
// Determine what type of compressed data the file contains.
var format = header[PVR_HEADER_FORMAT];
var internalFormat;
switch(format) {
case PVR_FORMAT_2BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_2BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_ETC1:
internalFormat = COMPRESSED_RGB_ETC1_WEBGL;
break;
case PVR_FORMAT_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case PVR_FORMAT_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case PVR_FORMAT_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
default:
errorCallback("Unsupported PVR format: " + format);
return;
}
// Gather other basic metrics and a view of the raw PVRTC data.
var width = header[PVR_HEADER_WIDTH];
var height = header[PVR_HEADER_HEIGHT];
var levels = header[PVR_HEADER_MIPMAPCOUNT];
var dataOffset = header[PVR_HEADER_METADATA] + 52;
var pvrtcData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the PVRTC information to the callback for uploading.
callback(pvrtcData, width, height, levels, internalFormat);
}
//=============//
// IMG loading //
//=============//
/*
This function provides a method for loading webgl textures using a pool of
image elements, which has very low memory overhead. For more details see:
http://blog.tojicode.com/2012/03/javascript-memory-optimization-and.html
*/
var loadImgTexture = (function createTextureLoader() {
var MAX_CACHE_IMAGES = 16;
var textureImageCache = new Array(MAX_CACHE_IMAGES);
var cacheTop = 0;
var remainingCacheImages = MAX_CACHE_IMAGES;
var pendingTextureRequests = [];
var TextureImageLoader = function(loadedCallback) {
var self = this;
var blackPixel = new Uint8Array([0, 0, 0]);
this.gl = null;
this.texture = null;
this.callback = null;
this.image = new Image();
this.image.crossOrigin = 'anonymous';
this.image.addEventListener('load', function() {
var gl = self.gl;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
var startTime = Date.now();
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, self.image);
if (isPowerOfTwo(self.image.width) && isPowerOfTwo(self.image.height)) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
var uploadTime = Date.now() - startTime;
if(self.callback) {
var stats = {
width: self.image.width,
height: self.image.height,
internalFormat: gl.RGBA,
levelZeroSize: self.image.width * self.image.height * 4,
uploadTime: uploadTime
};
self.callback(self.texture, null, stats);
}
loadedCallback(self);
}, false);
this.image.addEventListener('error', function(ev) {
clearOnError(self.gl, 'Image could not be loaded: ' + self.image.src, self.texture, self.callback);
loadedCallback(self);
}, false);
};
TextureImageLoader.prototype.loadTexture = function(gl, src, texture, callback) {
this.gl = gl;
this.texture = texture;
this.callback = callback;
this.image.src = src;
};
var PendingTextureRequest = function(gl, src, texture, callback) {
this.gl = gl;
this.src = src;
this.texture = texture;
this.callback = callback;
};
function releaseTextureImageLoader(til) {
var req;
if(pendingTextureRequests.length) {
req = pendingTextureRequests.shift();
til.loadTexture(req.gl, req.src, req.texture, req.callback);
} else {
textureImageCache[cacheTop++] = til;
}
}
return function(gl, src, texture, callback) {
var til;
if(cacheTop) {
til = textureImageCache[--cacheTop];
til.loadTexture(gl, src, texture, callback);
} else if (remainingCacheImages) {
til = new TextureImageLoader(releaseTextureImageLoader);
til.loadTexture(gl, src, texture, callback);
--remainingCacheImages;
} else {
pendingTextureRequests.push(new PendingTextureRequest(gl, src, texture, callback));
}
return texture;
};
})();
//=====================//
// TextureLoader Class //
//=====================//
// This class is our public interface.
var TextureLoader = function(gl) {
this.gl = gl;
// Load the compression format extensions, if available
this.dxtExt = getExtension(gl, "WEBGL_compressed_texture_s3tc");
this.pvrtcExt = getExtension(gl, "WEBGL_compressed_texture_pvrtc");
this.atcExt = getExtension(gl, "WEBGL_compressed_texture_atc");
this.etc1Ext = getExtension(gl, "WEBGL_compressed_texture_etc1");
// Returns whether or not the compressed format is supported by the WebGL implementation
TextureLoader.prototype._formatSupported = function(format) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
return !!this.dxtExt;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return !!this.pvrtcExt;
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return !!this.atcExt;
case COMPRESSED_RGB_ETC1_WEBGL:
return !!this.etc1Ext;
default:
return false;
}
}
// Uploads compressed texture data to the GPU.
TextureLoader.prototype._uploadCompressedData = function(data, width, height, levels, internalFormat, texture, callback) {
var gl = this.gl;
gl.bindTexture(gl.TEXTURE_2D, texture);
var offset = 0;
var stats = {
width: width,
height: height,
internalFormat: internalFormat,
levelZeroSize: textureLevelSize(internalFormat, width, height),
uploadTime: 0
};
var startTime = Date.now();
// Loop through each mip level of compressed texture data provided and upload it to the given texture.
for (var i = 0; i < levels; ++i) {
// Determine how big this level of compressed texture data is in bytes.
var levelSize = textureLevelSize(internalFormat, width, height);
// Get a view of the bytes for this level of DXT data.
var dxtLevel = new Uint8Array(data.buffer, data.byteOffset + offset, levelSize);
// Upload!
gl.compressedTexImage2D(gl.TEXTURE_2D, i, internalFormat, width, height, 0, dxtLevel);
// The next mip level will be half the height and width of this one.
width = width >> 1;
height = height >> 1;
// Advance the offset into the compressed texture data past the current mip level's data.
offset += levelSize;
}
stats.uploadTime = Date.now() - startTime;
// We can't use gl.generateMipmaps with compressed textures, so only use
// mipmapped filtering if the compressed texture data contained mip levels.
if (levels > 1) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
// Notify the user that the texture is ready.
if (callback) { callback(texture, null, stats); }
}
TextureLoader.prototype.supportsDXT = function() {
return !!this.dxtExt;
}
TextureLoader.prototype.supportsPVRTC = function() {
return !!this.pvrtcExt;
}
TextureLoader.prototype.supportsATC = function() {
return !!this.atcExt;
}
TextureLoader.prototype.supportsETC1 = function() {
return !!this.etc1Ext;
}
// Loads an image file into the given texture.
// Supports any format that can be loaded into an img tag
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadIMG = function(src, texture, callback) {
if(!texture) {
texture = this.gl.createTexture();
}
loadImgTexture(this.gl, src, texture, callback);
return texture;
}
// Loads a DDS file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadDDS = function(src, texture, callback) {
var self = this;
if (!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parseDDS(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed DXT data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a PVR file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadPVR = function(src, texture, callback) {
var self = this;
if(!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parsePVR(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed PVR data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a texture from a file. Guesses the type based on extension.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadTexture = function(src, texture, callback) {
// Shamelessly lifted from StackOverflow :)
// http://stackoverflow.com/questions/680929
var re = /(?:\.([^.]+))?$/;
var ext = re.exec(src)[1] || '';
ext = ext.toLowerCase();
switch(ext) {
case 'dds':
return this.loadDDS(src, texture, callback);
case 'pvr':
return this.loadPVR(src, texture, callback);
default:
return this.loadIMG(src, texture, callback);
}
}
// Sets a texture to a solid RGBA color
// If no texture is provided one is created and returned.
TextureLoader.prototype.makeSolidColor = function(r, g, b, a, texture) {
var gl = this.gl;
var data = new Uint8Array([r, g, b, a]);
if(!texture) {
texture = gl.createTexture();
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
return texture;
}
}
return TextureLoader;
})();
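// Usage sketch (assumes an existing WebGL context "gl"; file names are
// illustrative): the loader picks a decode path from the file extension.
//   var loader = new WGLUTextureLoader(gl);
//   var tex = loader.loadTexture("media/textures/cube-sea.png");
//   if (loader.supportsDXT()) {
//     loader.loadDDS("media/textures/example.dds", null,
//         function (texture, error, stats) {
//           if (!error) console.log("Upload took " + stats.uploadTime + "ms");
//         });
//   }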

View file

@ -0,0 +1,94 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Provides a simple way to get values from the query string if they're present
and use a default value if not. Not strictly a "WebGL" utility, but I use it
frequently enough for debugging that I wanted to include it here.
Example:
For the URL http://example.com/index.html?particleCount=1000
WGLUUrl.getInt("particleCount", 100); // URL overrides, returns 1000
WGLUUrl.getInt("particleSize", 10); // Not in URL, returns default of 10
*/
var WGLUUrl = (function() {
"use strict";
var urlArgs = null;
function ensureArgsCached() {
if (!urlArgs) {
urlArgs = {};
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split("=");
urlArgs[pair[0].toLowerCase()] = decodeURIComponent(pair[1]);
}
}
}
function getString(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return urlArgs[lcaseName];
}
return defaultValue;
}
function getInt(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10);
}
return defaultValue;
}
function getFloat(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseFloat(urlArgs[lcaseName]);
}
return defaultValue;
}
function getBool(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10) != 0;
}
return defaultValue;
}
return {
getString: getString,
getInt: getInt,
getFloat: getFloat,
getBool: getBool
};
})();

View file

@ -0,0 +1,284 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function (VRAudioPanner) {
'use strict';
// Default settings for panning. Cone parameters are experimentally
// determined.
var _PANNING_MODEL = 'HRTF';
var _DISTANCE_MODEL = 'inverse';
var _CONE_INNER_ANGLE = 60;
var _CONE_OUTER_ANGLE = 120;
var _CONE_OUTER_GAIN = 0.25;
// Super-simple web audio version detection.
var _LEGACY_WEBAUDIO = window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext');
if (_LEGACY_WEBAUDIO)
console.log('[VRAudioPanner] outdated version of Web Audio API detected.');
// Master audio context.
var _context = _LEGACY_WEBAUDIO ? new webkitAudioContext() : new AudioContext();
/**
* A buffer source player with HRTF panning for testing purpose.
* @param {Object} options Default options.
* @param {Number} options.gain Sound object gain. (0.0~1.0)
* @param {Number} options.buffer AudioBuffer to play.
* @param {Number} options.detune Detune parameter. (cent)
* @param {Array} options.position x, y, z position in a array.
*/
function TestSource (options) {
this._src = _context.createBufferSource();
this._out = _context.createGain();
this._panner = _context.createPanner();
this._analyser = _context.createAnalyser();
this._src.connect(this._out);
this._out.connect(this._analyser);
this._analyser.connect(this._panner);
this._panner.connect(_context.destination);
this._src.buffer = options.buffer;
this._src.loop = true;
this._out.gain.value = options.gain;
this._analyser.fftSize = 1024;
this._analyser.smoothingTimeConstant = 0.85;
this._lastRMSdB = 0.0;
this._panner.panningModel = _PANNING_MODEL;
this._panner.distanceModel = _DISTANCE_MODEL;
this._panner.coneInnerAngle = _CONE_INNER_ANGLE;
this._panner.coneOuterAngle = _CONE_OUTER_ANGLE;
this._panner.coneOuterGain = _CONE_OUTER_GAIN;
this._position = [0, 0, 0];
this._orientation = [1, 0, 0];
this._analyserBuffer = new Uint8Array(this._analyser.fftSize);
if (!_LEGACY_WEBAUDIO) {
this._src.detune.value = (options.detune || 0);
this._analyserBuffer = new Float32Array(this._analyser.fftSize);
}
this.setPosition(options.position);
this.setOrientation(options.orientation);
};
TestSource.prototype.start = function () {
this._src.start(0);
};
TestSource.prototype.stop = function () {
this._src.stop(0);
};
TestSource.prototype.getPosition = function () {
return this._position;
};
TestSource.prototype.setPosition = function (position) {
if (position) {
this._position[0] = position[0];
this._position[1] = position[1];
this._position[2] = position[2];
}
this._panner.setPosition.apply(this._panner, this._position);
};
TestSource.prototype.getOrientation = function () {
return this._orientation;
};
TestSource.prototype.setOrientation = function (orientation) {
if (orientation) {
this._orientation[0] = orientation[0];
this._orientation[1] = orientation[1];
this._orientation[2] = orientation[2];
}
this._panner.setOrientation.apply(this._panner, this._orientation);
};
TestSource.prototype.getCubeScale = function () {
// Safari does not support getFloatTimeDomainData(), so fall back to the
// naive spectral energy sum. This is relatively expensive.
if (_LEGACY_WEBAUDIO) {
this._analyser.getByteFrequencyData(this._analyserBuffer);
for (var k = 0, total = 0; k < this._analyserBuffer.length; ++k)
total += this._analyserBuffer[k];
total /= this._analyserBuffer.length;
return (total / 256.0) * 1.5;
}
this._analyser.getFloatTimeDomainData(this._analyserBuffer);
for (var i = 0, sum = 0; i < this._analyserBuffer.length; ++i)
sum += this._analyserBuffer[i] * this._analyserBuffer[i];
// Calculate RMS and convert it to DB for perceptual loudness.
var rms = Math.sqrt(sum / this._analyserBuffer.length);
var db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
// Moving average with an alpha of 0.525. Experimentally determined.
this._lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - this._lastRMSdB);
// Scaling by 1/30 is also experimentally determined.
return this._lastRMSdB / 30.0;
};
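// Worked example: an RMS amplitude of 0.1 gives 10 * log10(0.1) = -10 dB,
// so db = 30 - 10 = 20; the smoothed value settles near 20 and the returned
// scale approaches 20 / 30 = 0.67.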
// Internal helper: load a file into a buffer. (github.com/hoch/spiral)
function _loadAudioFile(context, fileInfo, done) {
var xhr = new XMLHttpRequest();
xhr.open('GET', fileInfo.url);
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
if (xhr.status === 200) {
context.decodeAudioData(xhr.response,
function (buffer) {
console.log('[VRAudioPanner] File loaded: ' + fileInfo.url);
done(fileInfo.name, buffer);
},
function (message) {
console.log('[VRAudioPanner] Decoding failure: ' + fileInfo.url + ' (' + message + ')');
done(fileInfo.name, null);
});
} else {
console.log('[VRAudioPanner] XHR Error: ' + fileInfo.url + ' (' + xhr.statusText + ')');
done(fileInfo.name, null);
}
};
xhr.onerror = function (event) {
console.log('[VRAudioPanner] XHR Network failure: ' + fileInfo.url);
done(fileInfo.name, null);
};
xhr.send();
}
/**
* A wrapper/container class for multiple file loaders.
* @param {Object} context AudioContext
* @param {Object} audioFileData Audio file info in the format of {name, url}
* @param {Function} resolve Resolution handler for promise.
* @param {Function} reject Rejection handler for promise.
* @param {Function} progress Progress event handler.
*/
function AudioBufferManager(context, audioFileData, resolve, reject, progress) {
this._context = context;
this._resolve = resolve;
this._reject = reject;
this._progress = progress;
this._buffers = new Map();
this._loadingTasks = {};
// Iterating file loading.
for (var i = 0; i < audioFileData.length; i++) {
var fileInfo = audioFileData[i];
// Check for duplicate filenames and bail out if one is found.
if (this._loadingTasks.hasOwnProperty(fileInfo.name)) {
console.log('[VRAudioPanner] Duplicated filename in AudioBufferManager: ' + fileInfo.name);
return;
}
// Mark it as pending (0)
this._loadingTasks[fileInfo.name] = 0;
_loadAudioFile(this._context, fileInfo, this._done.bind(this));
}
}
AudioBufferManager.prototype._done = function (filename, buffer) {
// Label the loading task.
this._loadingTasks[filename] = buffer !== null ? 'loaded' : 'failed';
// A failed task will be a null buffer.
this._buffers.set(filename, buffer);
this._updateProgress(filename);
};
AudioBufferManager.prototype._updateProgress = function (filename) {
var numberOfFinishedTasks = 0, numberOfFailedTask = 0;
var numberOfTasks = 0;
for (var task in this._loadingTasks) {
numberOfTasks++;
if (this._loadingTasks[task] === 'loaded')
numberOfFinishedTasks++;
else if (this._loadingTasks[task] === 'failed')
numberOfFailedTask++;
}
if (typeof this._progress === 'function')
this._progress(filename, numberOfFinishedTasks, numberOfTasks);
if (numberOfFinishedTasks === numberOfTasks)
this._resolve(this._buffers);
if (numberOfFinishedTasks + numberOfFailedTask === numberOfTasks)
this._reject(this._buffers);
};
/**
* Returns true if the web audio implementation is outdated.
* @return {Boolean}
*/
VRAudioPanner.isWebAudioOutdated = function () {
return _LEGACY_WEBAUDIO;
}
/**
* Static method for updating listener's position.
* @param {Array} position Listener position in x, y, z.
*/
VRAudioPanner.setListenerPosition = function (position) {
_context.listener.setPosition.apply(_context.listener, position);
};
/**
* Static method for updating listener's orientation.
* @param {Array} orientation Listener orientation in x, y, z.
* @param {Array} upvector Listener's up vector in x, y, z.
*/
VRAudioPanner.setListenerOrientation = function (orientation, upvector) {
_context.listener.setOrientation(
orientation[0], orientation[1], orientation[2],
upvector[0], upvector[1], upvector[2]);
};
/**
* Load an audio file asynchronously.
* @param {Array} dataModel Audio file info in the format of {name, url}
* @param {Function} onprogress Callback function for reporting the progress.
* @return {Promise} Promise.
*/
VRAudioPanner.loadAudioFiles = function (dataModel, onprogress) {
return new Promise(function (resolve, reject) {
new AudioBufferManager(_context, dataModel, resolve, reject, onprogress);
});
};
/**
* Create a source player. See TestSource class for parameter description.
* @return {TestSource}
*/
VRAudioPanner.createTestSource = function (options) {
return new TestSource(options);
};
})(window.VRAudioPanner = {});
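// Usage sketch (the file name is hypothetical; assumes the audio context is
// allowed to start playback):
//   VRAudioPanner.loadAudioFiles([{ name: 'drums', url: 'media/sound/drums.wav' }])
//     .then(function (buffers) {
//       var source = VRAudioPanner.createTestSource({
//         gain: 0.8,
//         buffer: buffers.get('drums'),
//         detune: 0,
//         position: [0, 1, -2],
//         orientation: [1, 0, 0]
//       });
//       source.start();
//     });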

View file

@ -0,0 +1,210 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
/*
Like CubeSea, but designed around a user's physical space: one central
platform maps to the user's play area, and several floating cubes sit just
outside those boundaries (to add visual interest).
*/
window.VRCubeIsland = (function () {
"use strict";
var cubeIslandVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeIslandFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeIsland = function (gl, texture, width, depth) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeIslandVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeIslandFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
this.vertBuffer = gl.createBuffer();
this.indexBuffer = gl.createBuffer();
this.resize(width, depth);
};
CubeIsland.prototype.resize = function (width, depth) {
var gl = this.gl;
this.width = width;
this.depth = depth;
var cubeVerts = [];
var cubeIndices = [];
// Build a single box.
function appendBox (left, bottom, back, right, top, front) {
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 1.0, 1.0);
cubeVerts.push(left, top, front, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(left, top, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 0.0, 0.0);
cubeVerts.push(right, bottom, front, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, back, 0.0, 1.0);
cubeVerts.push(right, top, back, 0.0, 0.0);
cubeVerts.push(left, top, back, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, front, 0.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 1.0);
cubeVerts.push(right, top, front, 1.0, 0.0);
cubeVerts.push(left, top, front, 0.0, 0.0);
}
// Appends a cube with the given centerpoint and size.
function appendCube (x, y, z, size) {
var halfSize = size * 0.5;
appendBox(x - halfSize, y - halfSize, z - halfSize,
x + halfSize, y + halfSize, z + halfSize);
}
// Main "island", covers where the user can safely stand. Top of the cube
// (the ground the user stands on) should be at Y=0 to align with users
// floor. X=0 and Z=0 should be at the center of the users play space.
appendBox(-width * 0.5, -width, -depth * 0.5, width * 0.5, 0, depth * 0.5);
// A sprinkling of other cubes to make things more visually interesting.
appendCube(1.1, 0.3, (-depth * 0.5) - 0.8, 0.5);
appendCube(-0.5, 1.0, (-depth * 0.5) - 0.9, 0.75);
appendCube(0.6, 1.5, (-depth * 0.5) - 0.6, 0.4);
appendCube(-1.0, 0.5, (-depth * 0.5) - 0.5, 0.2);
appendCube((-width * 0.5) - 0.8, 0.3, -1.1, 0.5);
appendCube((-width * 0.5) - 0.9, 1.0, 0.5, 0.75);
appendCube((-width * 0.5) - 0.6, 1.5, -0.6, 0.4);
appendCube((-width * 0.5) - 0.5, 0.5, 1.0, 0.2);
appendCube((width * 0.5) + 0.8, 0.3, 1.1, 0.5);
appendCube((width * 0.5) + 0.9, 1.0, -0.5, 0.75);
appendCube((width * 0.5) + 0.6, 1.5, 0.6, 0.4);
appendCube((width * 0.5) + 0.5, 0.5, -1.0, 0.2);
appendCube(1.1, 1.4, (depth * 0.5) + 0.8, 0.5);
appendCube(-0.5, 1.0, (depth * 0.5) + 0.9, 0.75);
appendCube(0.6, 0.4, (depth * 0.5) + 0.6, 0.4);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
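// Example: for a 3m x 2m stage, calling resize(3, 2) rebuilds the island so
// its walkable top face spans x in [-1.5, 1.5] and z in [-1, 1] at y = 0,
// with the decorative cubes floating just outside those edges.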
CubeIsland.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, 1.5, -this.depth * 0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.5, 0.5, 0.5]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeIsland;
})();

View file

@ -0,0 +1,188 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRCubeSea = (function () {
"use strict";
var cubeSeaVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeSeaFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeSea = function (gl, texture) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeSeaVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeSeaFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var cubeVerts = [];
var cubeIndices = [];
// Build a single cube.
function appendCube (x, y, z) {
if (!x && !y && !z) {
// Don't create a cube in the center.
return;
}
var size = 0.2;
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 1.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 0.0, 0.0);
cubeVerts.push(x + size, y - size, z + size, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z - size, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z + size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 0.0);
}
var gridSize = 10;
// Build the cube sea
for (var x = 0; x < gridSize; ++x) {
for (var y = 0; y < gridSize; ++y) {
for (var z = 0; z < gridSize; ++z) {
appendCube(x - (gridSize / 2), y - (gridSize / 2), z - (gridSize / 2));
}
}
}
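// With gridSize = 10 this emits 10^3 - 1 = 999 cubes (the center one is
// skipped), i.e. 999 * 24 = 23976 vertices and 999 * 36 = 35964 indices,
// comfortably within Uint16 index range.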
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
var mortimer = mat4.create();
var a = [0.9868122935295105, -0.03754837438464165, -0.15745431184768677, 0, 0.011360996402800083, 0.9863911271095276, -0.1640235036611557, 0, 0.16147033870220184, 0.16007155179977417, 0.9738093614578247, 0, 0.192538782954216, 0.024526841938495636, -0.001076754298992455, 1.0000001192092896];
for (var i = 0; i < 16; ++i) {
mortimer[i] = a[i];
}
CubeSea.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
//mat4.invert(mortimer, modelViewMat);
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, -0.3, -0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.3, 0.3, 0.3]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeSea;
})();

View file

@ -0,0 +1,219 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRPanorama = (function () {
"use strict";
var panoVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var panoFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var Panorama = function (gl) {
this.gl = gl;
this.texture = gl.createTexture();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(panoVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(panoFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var panoVerts = [];
var panoIndices = [];
var radius = 2; // 2 meter radius sphere
var latSegments = 40;
var lonSegments = 40;
// Create the vertices
for (var i=0; i <= latSegments; ++i) {
var theta = i * Math.PI / latSegments;
var sinTheta = Math.sin(theta);
var cosTheta = Math.cos(theta);
for (var j=0; j <= lonSegments; ++j) {
var phi = j * 2 * Math.PI / lonSegments;
var sinPhi = Math.sin(phi);
var cosPhi = Math.cos(phi);
var x = sinPhi * sinTheta;
var y = cosTheta;
var z = -cosPhi * sinTheta;
var u = (j / lonSegments);
var v = (i / latSegments);
panoVerts.push(x * radius, y * radius, z * radius, u, v);
}
}
// Create the indices
for (var i = 0; i < latSegments; ++i) {
var offset0 = i * (lonSegments+1);
var offset1 = (i+1) * (lonSegments+1);
for (var j = 0; j < lonSegments; ++j) {
var index0 = offset0+j;
var index1 = offset1+j;
panoIndices.push(
index0, index1, index0+1,
index1, index1+1, index0+1
);
}
}
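// At 40x40 segments this produces (40 + 1) * (40 + 1) = 1681 vertices and
// 40 * 40 * 6 = 9600 indices for the full sphere.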
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(panoVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(panoIndices), gl.STATIC_DRAW);
this.indexCount = panoIndices.length;
this.imgElement = null;
this.videoElement = null;
};
Panorama.prototype.setImage = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var img = new Image();
img.addEventListener('load', function() {
self.imgElement = img;
self.videoElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, img);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.imgElement);
});
img.addEventListener('error', function(ev) {
console.error(ev.message);
reject(ev.message);
}, false);
img.crossOrigin = 'anonymous';
img.src = url;
});
};
Panorama.prototype.setVideo = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var video = document.createElement('video');
video.addEventListener('canplay', function() {
// Added "click to play" UI?
});
video.addEventListener('playing', function() {
self.videoElement = video;
self.imgElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, self.videoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.videoElement);
});
video.addEventListener('error', function(ev) {
console.error(video.error);
reject(video.error);
}, false);
video.loop = true;
video.autoplay = true;
video.crossOrigin = 'anonymous';
video.setAttribute('webkit-playsinline', '');
video.src = url;
});
};
Panorama.prototype.play = function() {
if (this.videoElement)
this.videoElement.play();
};
Panorama.prototype.pause = function() {
if (this.videoElement)
this.videoElement.pause();
};
Panorama.prototype.isPaused = function() {
if (this.videoElement)
return this.videoElement.paused;
return false;
};
Panorama.prototype.render = function (projectionMat, modelViewMat) {
var gl = this.gl;
var program = this.program;
if (!this.imgElement && !this.videoElement)
return;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (this.videoElement && !this.videoElement.paused) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.videoElement);
}
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
};
return Panorama;
})();

View file

@ -0,0 +1,181 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
window.VRSamplesUtil = (function () {
"use strict";
// Lifted from the WebVR Polyfill
function isMobile () {
return /Android/i.test(navigator.userAgent) ||
/iPhone|iPad|iPod/i.test(navigator.userAgent);
}
function getMessageContainer () {
var messageContainer = document.getElementById("vr-sample-message-container");
if (!messageContainer) {
messageContainer = document.createElement("div");
messageContainer.id = "vr-sample-message-container";
messageContainer.style.fontFamily = "sans-serif";
messageContainer.style.position = "absolute";
messageContainer.style.zIndex = "999";
messageContainer.style.left = "0";
messageContainer.style.top = "0";
messageContainer.style.right = "0";
messageContainer.style.margin = "0";
messageContainer.style.padding = "0";
messageContainer.align = "center";
document.body.appendChild(messageContainer);
}
return messageContainer;
}
function addMessageElement (message, backgroundColor) {
var messageElement = document.createElement("div");
messageElement.classList.add("vr-sample-message");
messageElement.style.color = "#FFF";
messageElement.style.backgroundColor = backgroundColor;
messageElement.style.borderRadius = "3px";
messageElement.style.position = "relative";
messageElement.style.display = "inline-block";
messageElement.style.margin = "0.5em";
messageElement.style.padding = "0.75em";
messageElement.innerHTML = message;
getMessageContainer().appendChild(messageElement);
return messageElement;
}
// Makes the given element fade out and remove itself from the DOM after the
// given timeout.
function makeToast (element, timeout) {
element.style.transition = "opacity 0.5s ease-in-out";
element.style.opacity = "1";
setTimeout(function () {
element.style.opacity = "0";
setTimeout(function () {
if (element.parentElement)
element.parentElement.removeChild(element);
}, 500);
}, timeout);
}
function addError (message, timeout) {
var element = addMessageElement("<b>ERROR:</b> " + message, "#D33");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function addInfo (message, timeout) {
var element = addMessageElement(message, "#22A");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function getButtonContainer () {
var buttonContainer = document.getElementById("vr-sample-button-container");
if (!buttonContainer) {
buttonContainer = document.createElement("div");
buttonContainer.id = "vr-sample-button-container";
buttonContainer.style.fontFamily = "sans-serif";
buttonContainer.style.position = "absolute";
buttonContainer.style.zIndex = "999";
buttonContainer.style.left = "0";
buttonContainer.style.bottom = "0";
buttonContainer.style.right = "0";
buttonContainer.style.margin = "0";
buttonContainer.style.padding = "0";
buttonContainer.align = "right";
document.body.appendChild(buttonContainer);
}
return buttonContainer;
}
function addButtonElement (message, key, icon) {
var buttonElement = document.createElement("div");
buttonElement.classList.add("vr-sample-button");
buttonElement.style.color = "#FFF";
buttonElement.style.fontWeight = "bold";
buttonElement.style.backgroundColor = "#888";
buttonElement.style.borderRadius = "5px";
buttonElement.style.border = "3px solid #555";
buttonElement.style.position = "relative";
buttonElement.style.display = "inline-block";
buttonElement.style.margin = "0.5em";
buttonElement.style.padding = "0.75em";
buttonElement.style.cursor = "pointer";
buttonElement.align = "center";
if (icon) {
buttonElement.innerHTML = "<img src='" + icon + "'/><br/>" + message;
} else {
buttonElement.innerHTML = message;
}
if (key) {
var keyElement = document.createElement("span");
keyElement.classList.add("vr-sample-button-accelerator");
keyElement.style.fontSize = "0.75em";
keyElement.style.fontStyle = "italic";
keyElement.innerHTML = " (" + key + ")";
buttonElement.appendChild(keyElement);
}
getButtonContainer().appendChild(buttonElement);
return buttonElement;
}
function addButton (message, key, icon, callback) {
var keyListener = null;
if (key) {
var keyCode = key.charCodeAt(0);
keyListener = function (event) {
if (event.keyCode === keyCode) {
callback(event);
}
};
document.addEventListener("keydown", keyListener, false);
}
var element = addButtonElement(message, key, icon);
element.addEventListener("click", function (event) {
callback(event);
event.preventDefault();
}, false);
return {
element: element,
keyListener: keyListener
};
}
function removeButton (button) {
if (!button)
return;
if (button.element.parentElement)
button.element.parentElement.removeChild(button.element);
if (button.keyListener)
document.removeEventListener("keydown", button.keyListener, false);
}
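// Usage sketch (the callback name is illustrative): addButton returns a
// handle holding both the DOM element and the key listener, so removeButton
// can clean up both:
//   var fsButton = VRSamplesUtil.addButton("Fullscreen", "F", null, onFullscreen);
//   VRSamplesUtil.removeButton(fsButton);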
return {
isMobile: isMobile,
addError: addError,
addInfo: addInfo,
addButton: addButton,
removeButton: removeButton,
makeToast: makeToast
};
})();

Binary file not shown (image added, 788 B)

Binary file not shown (image added, 51 KiB)

View file

@ -0,0 +1,312 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>05 - Room Scale</title>
<!--
This sample demonstrates how to create scenes that align with the space
physically available to the user (when that information is available).
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-island.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var PLAYER_HEIGHT = 1.65;
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeIsland = null;
var stats = null;
var debugGeom = null;
function initWebGL (preserveDrawingBuffer) {
var glAttribs = {
alpha: false,
antialias: false, //!VRSamplesUtil.isMobile(),
preserveDrawingBuffer: false //preserveDrawingBuffer
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
// If the VRDisplay doesn't have stageParameters we won't know
// how big the user's play space is. Construct a scene around a
// default space size like 2 meters by 2 meters as a placeholder.
cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
stats = new WGLUStats(gl);
debugGeom = new WGLUDebugGeometry(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
initWebGL(true);
if (vrDisplay.stageParameters &&
vrDisplay.stageParameters.sizeX > 0 &&
vrDisplay.stageParameters.sizeZ > 0) {
            // If we have stageParameters with a valid size, use that to resize
            // our scene to match the user's available space more closely. The
            // check for size > 0 is necessary because some devices, like the
            // Oculus Rift, can give you a standing space coordinate but don't
            // have a configured play area. These devices will return a stage
            // size of 0.
cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
} else {
if (vrDisplay.stageParameters) {
VRSamplesUtil.addInfo("VRDisplay reported stageParameters, but stage size was 0. Using default size.", 3000);
} else {
VRSamplesUtil.addInfo("VRDisplay did not report stageParameters", 3000);
}
}
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
// Get a matrix for the pose that takes into account the stageParameters
// if we have them, and otherwise adjusts the position to ensure we're
// not stuck in the floor.
function getStandingViewMatrix (out, view) {
if (vrDisplay.stageParameters) {
        // If the headset provides stageParameters, use the
        // sittingToStandingTransform to transform the view matrix into a
        // space where the floor in the center of the user's play space is the
        // origin.
mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
mat4.multiply(out, view, out);
} else {
// Otherwise you'll want to translate the view to compensate for the
// scene floor being at Y=0. Ideally this should match the user's
// height (you may want to make it configurable). For this demo we'll
// just assume all human beings are 1.65 meters (~5.4ft) tall.
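        // Equivalent shortcut: inverting a pure translation by +h is just a
        // translation by -h, so the four calls below could be collapsed to
        //   mat4.translate(out, view, [0, -PLAYER_HEIGHT, 0]);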
mat4.identity(out);
mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
mat4.invert(out, out);
mat4.multiply(out, view, out);
}
}
function renderSceneView (projection, view, pose) {
cubeIsland.render(projection, view, stats);
      // For fun, draw a blue cube where the player's head would have been if
      // we weren't taking the stageParameters into account. It'll start at the
      // center of the floor.
var orientation = pose.orientation;
var position = pose.position;
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!position) { position = [0, 0, 0]; }
debugGeom.bind(projection, view);
debugGeom.drawCube(orientation, position, 0.2, [0, 0, 1, 1]);
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
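            // Presenting: render the left and right eye views side by side
            // into the canvas, then hand the completed frame to the display.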
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
renderSceneView(frameData.leftProjectionMatrix, viewMat, frameData.pose);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
renderSceneView(frameData.rightProjectionMatrix, viewMat, frameData.pose);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
renderSceneView(projectionMat, viewMat, frameData.pose);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
mat4.translate(viewMat, viewMat, [0, -PLAYER_HEIGHT, 0]);
cubeIsland.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>


@@ -0,0 +1,262 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>04 - Simple Mirroring</title>
<!--
This sample demonstrates how to mirror content to an external display
while presenting to a VRDisplay.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
  <!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
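        // Allocates the eye render buffers at 75% of the recommended size to
        // reduce fill cost (the polyfill's BUFFER_SCALE option).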
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
        // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
    // ===================================================
    // WebGL scene setup. This code is not WebVR specific.
    // ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var gl = null;
var cubeSea = null;
var stats = null;
function initWebGL (preserveDrawingBuffer) {
// Setting preserveDrawingBuffer to true prevents the canvas from being
// implicitly cleared when calling submitFrame or compositing the canvas
// on the document. For the simplest form of mirroring we want to create
// the canvas with that option enabled. Note that this may incur a
// performance penalty, as it may imply that additional copies of the
// canvas backbuffer need to be made. As a result, we ONLY want to set
// that if we know the VRDisplay has an external display, which is why
// we defer WebGL initialization until after we've gotten results back
      // from navigator.vr.getDisplays and know which display we'll be
      // presenting with.
var glAttribs = {
alpha: false,
        antialias: false, // Could be !VRSamplesUtil.isMobile() to enable AA on desktop only.
        preserveDrawingBuffer: preserveDrawingBuffer // Only true when mirroring to an external display; see above.
};
gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
stats = new WGLUStats(gl);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
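    // ================================
    // WebVR-specific code begins here.
    // ================================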
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
// Only use preserveDrawingBuffer if we have an external display to
// mirror to.
initWebGL(vrDisplay.capabilities.hasExternalDisplay);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
// No VR means no mirroring, so create WebGL content without
// preserveDrawingBuffer
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
        // recommended by the VRDisplay, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
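        // e.g. a (hypothetical) display recommending 1512x1680 per eye would
        // yield a 3024x1680 canvas.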
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>


@@ -0,0 +1,307 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>03 - VR Presentation</title>
<!--
This sample demonstrates how to present the contents of a WebGL canvas to
a VRDisplay. The content is not mirrored on the main display while being
presented.
-->
<style>
#webgl-canvas, #presenting-message {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#presenting-message {
color: white;
font-family: sans-serif;
font-size: 2em;
font-weight: bold;
z-index: 1;
text-align: center;
padding: 0.5em;
background-color: #444;
display: none;
}
</style>
  <!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
        // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<div id="presenting-message">Put on your headset now</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
var glAttribs = {
alpha: false,
      antialias: false // Could be !VRSamplesUtil.isMobile() to enable AA on desktop only.
};
var gl = webglCanvas.getContext("webgl", glAttribs);
if (!gl) {
gl = webglCanvas.getContext("experimental-webgl", glAttribs);
if (!gl) {
VRSamplesUtil.addError("Your browser does not support WebGL.");
return;
}
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
var cubeSea = new VRCubeSea(gl, texture);
var stats = new WGLUStats(gl);
var presentingMessage = document.getElementById("presenting-message");
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
// This can only be called in response to a user gesture.
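        // Any direct user gesture qualifies; e.g. a click handler on your own
        // (hypothetical) DOM element would work just as well:
        //   enterVrLink.addEventListener("click", onVRRequestPresent);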
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
        // Run the present-change handling directly as well, in case the UA
        // doesn't fire a presentchange event on its own.
        onVRPresentChange();
}, function () {
VRSamplesUtil.addError("requestPresent failed.", 2000);
});
}
function onVRExitPresent () {
// No sense in exiting presentation if we're not actually presenting.
// (This may happen if we get an event like vrdisplaydeactivate when
// we weren't presenting.)
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
// Nothing to do because we're handling things in onVRPresentChange.
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
// When we begin or end presenting, the canvas should be resized to the
// recommended dimensions for the display.
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
          // Because we're not mirroring, any image on an external screen will
          // freeze while presenting. It's better to replace it with a message
// indicating that content is being shown on the VRDisplay.
presentingMessage.style.display = "block";
// On devices with an external display the UA may not provide a way
// to exit VR presentation mode, so we should provide one ourselves.
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
// If we have an external display take down the presenting message and
// change the button back to "Enter VR".
if (vrDisplay.capabilities.hasExternalDisplay) {
presentingMessage.style.display = "";
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.vr) {
frameData = new VRFrameData();
navigator.vr.getDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[0];
          // It's highly recommended that you set the near and far planes to
          // something appropriate for your scene so the projection matrices
          // WebVR produces have a well-scaled depth buffer.
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
          // Generally, you want to wait until VR support is confirmed and
          // you know the user has a connected VRDisplay capable of presenting
          // before adding UI that advertises VR features.
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
// The UA may kick us out of VR present mode for any reason, so to
// ensure we always know when we begin/end presenting we need to
// listen for vrdisplaypresentchange events.
vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
// These events fire when the user agent has had some indication that
          // it would be appropriate to enter or exit VR presentation mode, such
// as the user putting on a headset and triggering a proximity sensor.
// You can inspect the `reason` property of the event to learn why the
// event was fired, but in this case we're going to always trust the
// event and enter or exit VR presentation mode when asked.
//vrDisplay.addEventListener('activate', onVRRequestPresent, false);
//vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
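          // A sketch of checking the reason first (the `reason` values here
          // follow the WebVR 1.1 draft and may differ in this API revision):
          //   vrDisplay.addEventListener('activate', function (evt) {
          //     if (evt.reason === 'mounted') onVRRequestPresent();
          //   }, false);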
} else {
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
});
} else if (navigator.getVRDevices) {
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
        // recommended by the VRDisplay, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
}
}
window.addEventListener("resize", onResize, false);
onResize();
function onAnimationFrame (t) {
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
// When presenting content to the VRDisplay we want to update at its
// refresh rate if it differs from the refresh rate of the main
// display. Calling VRDisplay.requestAnimationFrame ensures we render
// at the right speed for VR.
vrDisplay.requestAnimationFrame(onAnimationFrame);
// As a general rule you want to get the pose as late as possible
// and call VRDisplay.submitFrame as early as possible after
// retrieving the pose. Do any work for the frame that doesn't need
// to know the pose earlier to ensure the lowest latency possible.
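          // Per frame the order is: requestAnimationFrame -> getFrameData ->
          // render both eyes -> submitFrame, with pose-independent work done
          // before getFrameData where possible.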
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
// When presenting render a stereo view.
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
// If we're currently presenting to the VRDisplay we need to
// explicitly indicate we're done rendering.
vrDisplay.submitFrame();
} else {
// When not presenting render a mono view that still takes pose into
// account.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
              // Build a projection matrix for the window's aspect ratio; the
              // left eye's view matrix still provides head tracking.
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
window.requestAnimationFrame(onAnimationFrame);
})();
</script>
</body>
</html>