WebVR API Implementation, r=larsbergstrom

Imanol Fernandez 2016-12-16 18:39:35 +01:00
parent 13826970c4
commit c5705bff50
70 changed files with 13044 additions and 20 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,270 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
var WGLUDebugGeometry = (function() {
"use strict";
var debugGeomVS = [
"uniform mat4 projectionMat;",
"uniform mat4 viewMat;",
"uniform mat4 modelMat;",
"attribute vec3 position;",
"void main() {",
" gl_Position = projectionMat * viewMat * modelMat * vec4( position, 1.0 );",
"}",
].join("\n");
var debugGeomFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var DebugGeometry = function(gl) {
this.gl = gl;
this.projMat = mat4.create();
this.viewMat = mat4.create();
this.modelMat = mat4.create();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(debugGeomVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(debugGeomFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({ position: 0 });
this.program.link();
var verts = [];
var indices = [];
//
// Cube Geometry
//
this.cubeIndexOffset = indices.length;
var size = 0.5;
// Bottom
var idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, -size, +size);
verts.push(-size, -size, +size);
// Top
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, +size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
// Left
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(-size, +size, -size);
verts.push(-size, +size, +size);
verts.push(-size, -size, +size);
// Right
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(+size, +size, +size);
verts.push(+size, -size, +size);
// Back
idx = verts.length / 3.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size);
verts.push(+size, -size, -size);
verts.push(+size, +size, -size);
verts.push(-size, +size, -size);
// Front
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, +size);
verts.push(+size, -size, +size);
verts.push(+size, +size, +size);
verts.push(-size, +size, +size);
this.cubeIndexCount = indices.length - this.cubeIndexOffset;
//
// Cone Geometry
//
this.coneIndexOffset = indices.length;
var size = 0.5;
var conePointVertex = verts.length / 3.0;
var coneBaseVertex = conePointVertex+1;
var coneSegments = 16;
// Point
verts.push(0, size, 0);
// Base Vertices
for (var i = 0; i < coneSegments; ++i) {
if (i > 0) {
idx = verts.length / 3.0;
indices.push(idx-1, conePointVertex, idx);
}
var rad = ((Math.PI * 2) / coneSegments) * i;
verts.push(Math.sin(rad) * (size / 2.0), -size, Math.cos(rad) * (size / 2.0));
}
// Last triangle to fill the gap
indices.push(idx, conePointVertex, coneBaseVertex);
// Base triangles
for (var i = 2; i < coneSegments; ++i) {
indices.push(coneBaseVertex, coneBaseVertex+(i-1), coneBaseVertex+i);
}
this.coneIndexCount = indices.length - this.coneIndexOffset;
//
// Rect geometry
//
this.rectIndexOffset = indices.length;
idx = verts.length / 3.0;
indices.push(idx, idx+1, idx+2, idx+3, idx);
verts.push(0, 0, 0);
verts.push(1, 0, 0);
verts.push(1, 1, 0);
verts.push(0, 1, 0);
this.rectIndexCount = indices.length - this.rectIndexOffset;
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
DebugGeometry.prototype.bind = function(projectionMat, viewMat) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.viewMat, false, viewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 12, 0);
};
DebugGeometry.prototype.bindOrtho = function() {
mat4.ortho(this.projMat, 0, this.gl.canvas.width, this.gl.canvas.height, 0, 0.1, 1024);
mat4.identity(this.viewMat);
this.bind(this.projMat, this.viewMat);
};
DebugGeometry.prototype._bindUniforms = function(orientation, position, scale, color) {
if (!position) { position = [0, 0, 0]; }
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!scale) { scale = [1, 1, 1]; }
if (!color) { color = [1, 0, 0, 1]; }
mat4.fromRotationTranslationScale(this.modelMat, orientation, position, scale);
this.gl.uniformMatrix4fv(this.program.uniform.modelMat, false, this.modelMat);
this.gl.uniform4fv(this.program.uniform.color, color);
};
DebugGeometry.prototype.drawCube = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBox = function(orientation, position, scale, color) {
var gl = this.gl;
this._bindUniforms(orientation, position, scale, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBoxWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawRect = function(x, y, width, height, color) {
var gl = this.gl;
this._bindUniforms(null, [x, y, -1], [width, height, 1], color);
gl.drawElements(gl.LINE_STRIP, this.rectIndexCount, gl.UNSIGNED_SHORT, this.rectIndexOffset * 2.0);
};
DebugGeometry.prototype.drawCone = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
DebugGeometry.prototype.drawConeWithMatrix = function(mat, color) {
var gl = this.gl;
gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
gl.uniform4fv(this.program.uniform.color, color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
return DebugGeometry;
})();


@@ -0,0 +1,162 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Caches specified GL state, runs a callback, and restores the cached state when
done.
Example usage:
var savedState = [
gl.ARRAY_BUFFER_BINDING,
// TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
gl.CLEAR_COLOR,
];
// After this call the array buffer, texture unit 0, active texture, and clear
// color will be restored. The viewport will remain changed, however, because
// gl.VIEWPORT was not included in the savedState list.
WGLUPreserveGLState(gl, savedState, function(gl) {
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, ....);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, ...);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
});
Note that this is not intended to be fast. Managing state in your own code to
avoid redundant state setting and querying will always be faster. This function
is most useful for cases where you may not have full control over the WebGL
calls being made, such as tooling or effect injectors.
*/
function WGLUPreserveGLState(gl, bindings, callback) {
if (!bindings) {
callback(gl);
return;
}
var boundValues = [];
var activeTexture = null;
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
switch (binding) {
case gl.TEXTURE_BINDING_2D:
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
boundValues.push(null, null);
break;
}
if (!activeTexture) {
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
}
gl.activeTexture(textureUnit);
boundValues.push(gl.getParameter(binding), null);
break;
case gl.ACTIVE_TEXTURE:
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
boundValues.push(null);
break;
default:
boundValues.push(gl.getParameter(binding));
break;
}
}
callback(gl);
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
var boundValue = boundValues[i];
switch (binding) {
case gl.ACTIVE_TEXTURE:
break; // Ignore this binding, since we special-case it to happen last.
case gl.ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
break;
case gl.COLOR_CLEAR_VALUE:
gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.COLOR_WRITEMASK:
gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.CURRENT_PROGRAM:
gl.useProgram(boundValue);
break;
case gl.ELEMENT_ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
break;
case gl.FRAMEBUFFER_BINDING:
gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
break;
case gl.RENDERBUFFER_BINDING:
gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
break;
case gl.TEXTURE_BINDING_2D:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, boundValue);
break;
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
break;
case gl.VIEWPORT:
gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.BLEND:
case gl.CULL_FACE:
case gl.DEPTH_TEST:
case gl.SCISSOR_TEST:
case gl.STENCIL_TEST:
if (boundValue) {
gl.enable(binding);
} else {
gl.disable(binding);
}
break;
default:
console.log("No GL restore behavior for 0x" + binding.toString(16));
break;
}
if (activeTexture) {
gl.activeTexture(activeTexture);
}
}
}


@@ -0,0 +1,179 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Utility class to make loading shader programs easier. Does all the error
checking you typically want, automatically queries uniform and attribute
locations, and attempts to take advantage of some browsers' ability to link
asynchronously by not querying any information from the program until its
first use.
*/
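/*
Example usage (a minimal sketch, not part of the library; assumes a valid
WebGL context `gl`, shader source strings `vertSrc`/`fragSrc`, and that the
shaders actually declare `projectionMat` and `position`):
  var program = new WGLUProgram(gl);
  program.attachShaderSource(vertSrc, gl.VERTEX_SHADER);
  program.attachShaderSource(fragSrc, gl.FRAGMENT_SHADER);
  program.bindAttribLocation({ position: 0 });
  program.link();
  // Later, at draw time. The first use() performs the link/compile error
  // checks and fills in program.attrib and program.uniform.
  program.use();
  gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
  gl.enableVertexAttribArray(program.attrib.position);
*/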
var WGLUProgram = (function() {
"use strict";
// Attempts to allow the browser to asynchronously compile and link
var Program = function(gl) {
this.gl = gl;
this.program = gl.createProgram();
this.attrib = null;
this.uniform = null;
this._firstUse = true;
this._vertexShader = null;
this._fragmentShader = null;
}
Program.prototype.attachShaderSource = function(source, type) {
var gl = this.gl;
var shader;
switch (type) {
case gl.VERTEX_SHADER:
this._vertexShader = gl.createShader(type);
shader = this._vertexShader;
break;
case gl.FRAGMENT_SHADER:
this._fragmentShader = gl.createShader(type);
shader = this._fragmentShader;
break;
default:
console.error("Invalid Shader Type:", type);
return;
}
gl.attachShader(this.program, shader);
gl.shaderSource(shader, source);
gl.compileShader(shader);
}
Program.prototype.attachShaderSourceFromXHR = function(url, type) {
var self = this;
return new Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.addEventListener("load", function (ev) {
if (xhr.status == 200) {
self.attachShaderSource(xhr.response, type);
resolve();
} else {
reject(xhr.statusText);
}
}, false);
xhr.open("GET", url, true);
xhr.send(null);
});
}
Program.prototype.attachShaderSourceFromTag = function(tagId, type) {
var shaderTag = document.getElementById(tagId);
if (!shaderTag) {
console.error("Shader source tag not found:", tagId);
return;
}
if (!type) {
if (shaderTag.type == "x-shader/x-vertex") {
type = this.gl.VERTEX_SHADER;
} else if (shaderTag.type == "x-shader/x-fragment") {
type = this.gl.FRAGMENT_SHADER;
} else {
console.error("Invalid Shader Type:", shaderTag.type);
return;
}
}
var src = "";
var k = shaderTag.firstChild;
while (k) {
if (k.nodeType == 3) {
src += k.textContent;
}
k = k.nextSibling;
}
this.attachShaderSource(src, type);
}
Program.prototype.bindAttribLocation = function(attribLocationMap) {
var gl = this.gl;
if (attribLocationMap) {
this.attrib = {};
for (var attribName in attribLocationMap) {
gl.bindAttribLocation(this.program, attribLocationMap[attribName], attribName);
this.attrib[attribName] = attribLocationMap[attribName];
}
}
}
Program.prototype.transformFeedbackVaryings = function(varyings, type) {
this.gl.transformFeedbackVaryings(this.program, varyings, type);
}
Program.prototype.link = function() {
this.gl.linkProgram(this.program);
}
Program.prototype.use = function() {
var gl = this.gl;
// If this is the first time the program has been used do all the error checking and
// attrib/uniform querying needed.
if (this._firstUse) {
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
if (this._vertexShader && !gl.getShaderParameter(this._vertexShader, gl.COMPILE_STATUS)) {
console.error("Vertex shader compile error:", gl.getShaderInfoLog(this._vertexShader));
} else if (this._fragmentShader && !gl.getShaderParameter(this._fragmentShader, gl.COMPILE_STATUS)) {
console.error("Fragment shader compile error:", gl.getShaderInfoLog(this._fragmentShader));
} else {
console.error("Program link error:", gl.getProgramInfoLog(this.program));
}
gl.deleteProgram(this.program);
this.program = null;
} else {
if (!this.attrib) {
this.attrib = {};
var attribCount = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
for (var i = 0; i < attribCount; i++) {
var attribInfo = gl.getActiveAttrib(this.program, i);
this.attrib[attribInfo.name] = gl.getAttribLocation(this.program, attribInfo.name);
}
}
this.uniform = {};
var uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(this.program, i);
uniformName = uniformInfo.name.replace("[0]", "");
this.uniform[uniformName] = gl.getUniformLocation(this.program, uniformName);
}
}
gl.deleteShader(this._vertexShader);
gl.deleteShader(this._fragmentShader);
this._firstUse = false;
}
gl.useProgram(this.program);
}
return Program;
})();


@@ -0,0 +1,649 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Heavily inspired by Mr. Doob's stats.js, this FPS counter is rendered completely
with WebGL, allowing it to be shown in cases where overlaid HTML elements aren't
usable (like WebVR), or if you want the FPS counter to be rendered as part of
your scene.
See stats-test.html for basic usage.
*/
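/*
A minimal per-frame sketch (illustrative only; `gl`, `projectionMat`, and
`modelViewMat` are assumed to come from the application's own render loop):
  var stats = new WGLUStats(gl);
  function onAnimationFrame() {
    window.requestAnimationFrame(onAnimationFrame);
    stats.begin();
    // ... draw the scene ...
    stats.renderOrtho();                          // 2D overlay in screen space
    // stats.render(projectionMat, modelViewMat); // or placed in the 3D scene
    stats.end();
  }
  onAnimationFrame();
*/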
var WGLUStats = (function() {
"use strict";
//--------------------
// glMatrix functions
//--------------------
// These functions have been copied here from glMatrix (glmatrix.net) to allow
// this file to run standalone.
var mat4_identity = function(out) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
var mat4_multiply = function (out, a, b) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
// Cache only the current line of the second matrix
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
return out;
};
var mat4_fromTranslation = function(out, v) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
var mat4_ortho = function (out, left, right, bottom, top, near, far) {
var lr = 1 / (left - right),
bt = 1 / (bottom - top),
nf = 1 / (near - far);
out[0] = -2 * lr;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = -2 * bt;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 2 * nf;
out[11] = 0;
out[12] = (left + right) * lr;
out[13] = (top + bottom) * bt;
out[14] = (far + near) * nf;
out[15] = 1;
return out;
};
var mat4_translate = function (out, a, v) {
var x = v[0], y = v[1], z = v[2],
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
if (a === out) {
out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
} else {
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
out[12] = a00 * x + a10 * y + a20 * z + a[12];
out[13] = a01 * x + a11 * y + a21 * z + a[13];
out[14] = a02 * x + a12 * y + a22 * z + a[14];
out[15] = a03 * x + a13 * y + a23 * z + a[15];
}
return out;
};
var mat4_scale = function(out, a, v) {
var x = v[0], y = v[1], z = v[2];
out[0] = a[0] * x;
out[1] = a[1] * x;
out[2] = a[2] * x;
out[3] = a[3] * x;
out[4] = a[4] * y;
out[5] = a[5] * y;
out[6] = a[6] * y;
out[7] = a[7] * y;
out[8] = a[8] * z;
out[9] = a[9] * z;
out[10] = a[10] * z;
out[11] = a[11] * z;
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
//-------------------
// Utility functions
//-------------------
function linkProgram(gl, vertexSource, fragmentSource, attribLocationMap) {
// No error checking for brevity.
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexSource);
gl.compileShader(vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentSource);
gl.compileShader(fragmentShader);
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
for (var attribName in attribLocationMap)
gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
gl.linkProgram(program);
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
return program;
}
function getProgramUniforms(gl, program) {
var uniforms = {};
var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(program, i);
uniformName = uniformInfo.name.replace("[0]", "");
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
return uniforms;
}
//----------------------------
// Seven-segment text display
//----------------------------
var sevenSegmentVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec2 position;",
"void main() {",
" gl_Position = projectionMat * modelViewMat * vec4( position, 0.0, 1.0 );",
"}",
].join("\n");
var sevenSegmentFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var SevenSegmentText = function (gl) {
this.gl = gl;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, sevenSegmentVS, sevenSegmentFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var verts = [];
var segmentIndices = {};
var indices = [];
var width = 0.5;
var thickness = 0.25;
this.kerning = 2.0;
this.matrix = new Float32Array(16);
function defineSegment(id, left, top, right, bottom) {
var idx = verts.length / 2;
verts.push(
left, top,
right, top,
right, bottom,
left, bottom);
segmentIndices[id] = [
idx, idx+2, idx+1,
idx, idx+3, idx+2];
}
var characters = {};
this.characters = characters;
function defineCharacter(c, segments) {
var character = {
character: c,
offset: indices.length * 2,
count: 0
};
for (var i = 0; i < segments.length; ++i) {
var idx = segments[i];
var segment = segmentIndices[idx];
character.count += segment.length;
indices.push.apply(indices, segment);
}
characters[c] = character;
}
/* Segment layout is as follows:
|-0-|
3 4
|-1-|
5 6
|-2-|
*/
defineSegment(0, -1, 1, width, 1-thickness);
defineSegment(1, -1, thickness*0.5, width, -thickness*0.5);
defineSegment(2, -1, -1+thickness, width, -1);
defineSegment(3, -1, 1, -1+thickness, -thickness*0.5);
defineSegment(4, width-thickness, 1, width, -thickness*0.5);
defineSegment(5, -1, thickness*0.5, -1+thickness, -1);
defineSegment(6, width-thickness, thickness*0.5, width, -1);
defineCharacter("0", [0, 2, 3, 4, 5, 6]);
defineCharacter("1", [4, 6]);
defineCharacter("2", [0, 1, 2, 4, 5]);
defineCharacter("3", [0, 1, 2, 4, 6]);
defineCharacter("4", [1, 3, 4, 6]);
defineCharacter("5", [0, 1, 2, 3, 6]);
defineCharacter("6", [0, 1, 2, 3, 5, 6]);
defineCharacter("7", [0, 4, 6]);
defineCharacter("8", [0, 1, 2, 3, 4, 5, 6]);
defineCharacter("9", [0, 1, 2, 3, 4, 6]);
defineCharacter("A", [0, 1, 3, 4, 5, 6]);
defineCharacter("B", [1, 2, 3, 5, 6]);
defineCharacter("C", [0, 2, 3, 5]);
defineCharacter("D", [1, 2, 4, 5, 6]);
defineCharacter("E", [0, 1, 2, 4, 6]);
defineCharacter("F", [0, 1, 3, 5]);
defineCharacter("P", [0, 1, 3, 4, 5]);
defineCharacter("-", [1]);
defineCharacter(" ", []);
defineCharacter("_", [2]); // Used for undefined characters
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.DYNAMIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
SevenSegmentText.prototype.render = function(projectionMat, modelViewMat, text, r, g, b, a) {
var gl = this.gl;
if (r == undefined || g == undefined || b == undefined) {
r = 0.0;
g = 1.0;
b = 0.0;
}
if (a == undefined)
a = 1.0;
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniform4f(this.uniforms.color, r, g, b, a);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(this.attribs.position);
gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
text = text.toUpperCase();
var offset = 0;
for (var i = 0; i < text.length; ++i) {
var c;
if (text[i] in this.characters) {
c = this.characters[text[i]];
} else {
c = this.characters["_"];
}
if (c.count != 0) {
mat4_fromTranslation(this.matrix, [offset, 0, 0]);
mat4_multiply(this.matrix, modelViewMat, this.matrix);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, this.matrix);
gl.drawElements(gl.TRIANGLES, c.count, gl.UNSIGNED_SHORT, c.offset);
}
offset += this.kerning;
}
}
//-----------
// FPS Graph
//-----------
var statsVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec3 color;",
"varying vec4 vColor;",
"void main() {",
" vColor = vec4(color, 1.0);",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var statsFS = [
"precision mediump float;",
"varying vec4 vColor;",
"void main() {",
" gl_FragColor = vColor;",
"}",
].join("\n");
var segments = 30;
var maxFPS = 90;
function segmentToX(i) {
return ((0.9/segments) * i) - 0.45;
}
function fpsToY(value) {
return (Math.min(value, maxFPS) * (0.7 / maxFPS)) - 0.45;
}
function fpsToRGB(value) {
return {
r: Math.max(0.0, Math.min(1.0, 1.0 - (value/60))),
g: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15)))),
b: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15))))
};
}
var now = /*( performance && performance.now ) ? performance.now.bind( performance ) :*/ Date.now;
var Stats = function(gl) {
this.gl = gl;
this.sevenSegmentText = new SevenSegmentText(gl);
this.startTime = now();
this.prevTime = this.startTime;
this.frames = 0;
this.fps = 0;
this.orthoProjMatrix = new Float32Array(16);
this.orthoViewMatrix = new Float32Array(16);
this.modelViewMatrix = new Float32Array(16);
// Hard coded because it doesn't change:
// Scale by 0.075 in X and Y
// Translate into upper left corner w/ z = 0.02
this.textMatrix = new Float32Array([
0.075, 0, 0, 0,
0, 0.075, 0, 0,
0, 0, 1, 0,
-0.3625, 0.3625, 0.02, 1
]);
this.lastSegment = 0;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, statsVS, statsFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var fpsVerts = [];
var fpsIndices = [];
// Graph geometry
for (var i = 0; i < segments; ++i) {
// Bar top
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
// Bar bottom
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
var idx = i * 4;
fpsIndices.push(idx, idx+3, idx+1,
idx+3, idx, idx+2);
}
function addBGSquare(left, bottom, right, top, z, r, g, b) {
var idx = fpsVerts.length / 6;
fpsVerts.push(left, bottom, z, r, g, b);
fpsVerts.push(right, top, z, r, g, b);
fpsVerts.push(left, top, z, r, g, b);
fpsVerts.push(right, bottom, z, r, g, b);
fpsIndices.push(idx, idx+1, idx+2,
idx, idx+3, idx+1);
};
// Panel Background
addBGSquare(-0.5, -0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.125);
// FPS Background
addBGSquare(-0.45, -0.45, 0.45, 0.25, 0.01, 0.0, 0.0, 0.4);
// 30 FPS line
addBGSquare(-0.45, fpsToY(30), 0.45, fpsToY(32), 0.015, 0.5, 0.0, 0.5);
// 60 FPS line
addBGSquare(-0.45, fpsToY(60), 0.45, fpsToY(62), 0.015, 0.2, 0.0, 0.75);
this.fpsVertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fpsVerts), gl.DYNAMIC_DRAW);
this.fpsIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(fpsIndices), gl.STATIC_DRAW);
this.fpsIndexCount = fpsIndices.length;
};
Stats.prototype.begin = function() {
this.startTime = now();
};
Stats.prototype.end = function() {
var time = now();
this.frames++;
if (time > this.prevTime + 250) {
this.fps = Math.round((this.frames * 1000) / (time - this.prevTime));
this.updateGraph(this.fps);
this.prevTime = time;
this.frames = 0;
}
};
Stats.prototype.updateGraph = function(value) {
var gl = this.gl;
var color = fpsToRGB(value);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
// Update the current segment with the new FPS value
var updateVerts = [
segmentToX(this.lastSegment), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(value), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
];
// Re-shape the next segment into the green "progress" line
color.r = 0.2;
color.g = 1.0;
color.b = 0.2;
if (this.lastSegment == segments - 1) {
// If we're updating the last segment we need to do two bufferSubDatas
// to update the segment and turn the first segment into the progress line.
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
updateVerts = [
segmentToX(0), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(0), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(0), 0.02, color.r, color.g, color.b
];
gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(updateVerts));
} else {
updateVerts.push(
segmentToX(this.lastSegment+1), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(0), 0.02, color.r, color.g, color.b
);
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
}
this.lastSegment = (this.lastSegment+1) % segments;
};
Stats.prototype.render = function(projectionMat, modelViewMat) {
var gl = this.gl;
// Render text first, minor win for early fragment discard
mat4_multiply(this.modelViewMatrix, modelViewMat, this.textMatrix);
this.sevenSegmentText.render(projectionMat, this.modelViewMatrix, this.fps + " FP5");
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, modelViewMat);
gl.enableVertexAttribArray(this.attribs.position);
gl.enableVertexAttribArray(this.attribs.color);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.vertexAttribPointer(this.attribs.position, 3, gl.FLOAT, false, 24, 0);
gl.vertexAttribPointer(this.attribs.color, 3, gl.FLOAT, false, 24, 12);
// Draw the graph and background in a single call
gl.drawElements(gl.TRIANGLES, this.fpsIndexCount, gl.UNSIGNED_SHORT, 0);
}
Stats.prototype.renderOrtho = function(x, y, width, height) {
var canvas = this.gl.canvas;
if (x == undefined || y == undefined) {
x = 10 * window.devicePixelRatio;
y = 10 * window.devicePixelRatio;
}
if (width == undefined || height == undefined) {
width = 75 * window.devicePixelRatio;
height = 75 * window.devicePixelRatio;
}
mat4_ortho(this.orthoProjMatrix, 0, canvas.width, 0, canvas.height, 0.1, 1024);
mat4_identity(this.orthoViewMatrix);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [x, canvas.height - height - y, -1]);
mat4_scale(this.orthoViewMatrix, this.orthoViewMatrix, [width, height, 1]);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [0.5, 0.5, 0]);
this.render(this.orthoProjMatrix, this.orthoViewMatrix);
}
return Stats;
})();


@@ -0,0 +1,687 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Handles loading of textures in multiple formats and tries to be efficient about it.
Formats supported will vary by devices. Use the .supports<format>() functions
to determine if a format is supported. Most of the time you can just call
loader.loadTexture("url"); and it will handle it based on the extension.
If the extension can't be relied on use the corresponding
.load<Extension>("url") calls.
*/
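/*
Example usage (a sketch; the URLs and the callback body are illustrative, and
`gl` is assumed to be a valid WebGL context):
  var loader = new WGLUTextureLoader(gl);
  // Pick the best asset variant based on what the implementation supports.
  var url = loader.supportsDXT() ? "textures/diffuse.dds" : "textures/diffuse.png";
  loader.loadTexture(url, null, function(texture, error, stats) {
    if (error) {
      console.error(error);
      return;
    }
    // stats reports width, height, internalFormat, levelZeroSize and uploadTime.
  });
*/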
var WGLUTextureLoader = (function() {
"use strict";
//============================//
// DXT constants and utilites //
//============================//
// Utility functions
// Builds a numeric code for a given fourCC string
function fourCCToInt32(value) {
return value.charCodeAt(0) +
(value.charCodeAt(1) << 8) +
(value.charCodeAt(2) << 16) +
(value.charCodeAt(3) << 24);
}
// Turns a fourCC numeric code into a string
function int32ToFourCC(value) {
return String.fromCharCode(
value & 0xff,
(value >> 8) & 0xff,
(value >> 16) & 0xff,
(value >> 24) & 0xff
);
}
// Calculates the size of a compressed texture level in bytes
function textureLevelSize(format, width, height) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGB_ETC1_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 8;
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 16;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
return Math.floor((Math.max(width, 8) * Math.max(height, 8) * 4 + 7) / 8);
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return Math.floor((Math.max(width, 16) * Math.max(height, 8) * 2 + 7) / 8);
default:
return 0;
}
}
// DXT formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/
var COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0;
var COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1;
var COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2;
var COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3;
// ATC formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_atc/
var COMPRESSED_RGB_ATC_WEBGL = 0x8C92;
var COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL = 0x8C93;
var COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL = 0x87EE;
// DXT values and structures referenced from:
// http://msdn.microsoft.com/en-us/library/bb943991.aspx/
var DDS_MAGIC = 0x20534444;
var DDSD_MIPMAPCOUNT = 0x20000;
var DDPF_FOURCC = 0x4;
var DDS_HEADER_LENGTH = 31; // The header length in 32 bit ints.
// Offsets into the header array.
var DDS_HEADER_MAGIC = 0;
var DDS_HEADER_SIZE = 1;
var DDS_HEADER_FLAGS = 2;
var DDS_HEADER_HEIGHT = 3;
var DDS_HEADER_WIDTH = 4;
var DDS_HEADER_MIPMAPCOUNT = 7;
var DDS_HEADER_PF_FLAGS = 20;
var DDS_HEADER_PF_FOURCC = 21;
// FourCC format identifiers.
var FOURCC_DXT1 = fourCCToInt32("DXT1");
var FOURCC_DXT3 = fourCCToInt32("DXT3");
var FOURCC_DXT5 = fourCCToInt32("DXT5");
var FOURCC_ATC = fourCCToInt32("ATC ");
var FOURCC_ATCA = fourCCToInt32("ATCA");
var FOURCC_ATCI = fourCCToInt32("ATCI");
//==================//
// Crunch constants //
//==================//
// Taken from crnlib.h
var CRN_FORMAT = {
cCRNFmtInvalid: -1,
cCRNFmtDXT1: 0,
// cCRNFmtDXT3 is not currently supported when writing to CRN - only DDS.
cCRNFmtDXT3: 1,
cCRNFmtDXT5: 2
// Crunch supports more formats than this, but we can't use them here.
};
// Mapping of Crunch formats to DXT formats.
var DXT_FORMAT_MAP = {};
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT1] = COMPRESSED_RGB_S3TC_DXT1_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT3] = COMPRESSED_RGBA_S3TC_DXT3_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT5] = COMPRESSED_RGBA_S3TC_DXT5_EXT;
//===============//
// PVR constants //
//===============//
// PVR formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_pvrtc/
var COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00;
var COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01;
var COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02;
var COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03;
// ETC1 format, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/
var COMPRESSED_RGB_ETC1_WEBGL = 0x8D64;
var PVR_FORMAT_2BPP_RGB = 0;
var PVR_FORMAT_2BPP_RGBA = 1;
var PVR_FORMAT_4BPP_RGB = 2;
var PVR_FORMAT_4BPP_RGBA = 3;
var PVR_FORMAT_ETC1 = 6;
var PVR_FORMAT_DXT1 = 7;
var PVR_FORMAT_DXT3 = 9;
var PVR_FORMAT_DXT5 = 5;
var PVR_HEADER_LENGTH = 13; // The header length in 32 bit ints.
var PVR_MAGIC = 0x03525650; //0x50565203;
// Offsets into the header array.
var PVR_HEADER_MAGIC = 0;
var PVR_HEADER_FORMAT = 2;
var PVR_HEADER_HEIGHT = 6;
var PVR_HEADER_WIDTH = 7;
var PVR_HEADER_MIPMAPCOUNT = 11;
var PVR_HEADER_METADATA = 12;
//============//
// Misc Utils //
//============//
// When an error occurs, set the texture to a 1x1 black pixel.
// This prevents WebGL errors caused by attempting to use an unrenderable texture
// and clears out stale data if we're re-using a texture.
function clearOnError(gl, error, texture, callback) {
if (console) {
console.error(error);
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// Notify the user that an error occurred and the texture is ready.
if (callback) { callback(texture, error, null); }
}
function isPowerOfTwo(n) {
return (n & (n - 1)) === 0;
}
function getExtension(gl, name) {
var vendorPrefixes = ["", "WEBKIT_", "MOZ_"];
var ext = null;
for (var i in vendorPrefixes) {
ext = gl.getExtension(vendorPrefixes[i] + name);
if (ext) { break; }
}
return ext;
}
//==================//
// DDS File Reading //
//==================//
// Parse a DDS file and provide information about the raw DXT data it contains to the given callback.
function parseDDS(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the DDS header.
var header = new Int32Array(arrayBuffer, 0, DDS_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid DDS file.
if(header[DDS_HEADER_MAGIC] != DDS_MAGIC) {
errorCallback("Invalid magic number in DDS header");
return 0;
}
if(!(header[DDS_HEADER_PF_FLAGS] & DDPF_FOURCC)) {
errorCallback("Unsupported format, must contain a FourCC code");
return 0;
}
// Determine what type of compressed data the file contains.
var fourCC = header[DDS_HEADER_PF_FOURCC];
var internalFormat;
switch(fourCC) {
case FOURCC_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case FOURCC_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case FOURCC_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
case FOURCC_ATC:
internalFormat = COMPRESSED_RGB_ATC_WEBGL;
break;
case FOURCC_ATCA:
internalFormat = COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL;
break;
case FOURCC_ATCI:
internalFormat = COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL;
break;
default:
errorCallback("Unsupported FourCC code: " + int32ToFourCC(fourCC));
return;
}
// Determine how many mipmap levels the file contains.
var levels = 1;
if(header[DDS_HEADER_FLAGS] & DDSD_MIPMAPCOUNT) {
levels = Math.max(1, header[DDS_HEADER_MIPMAPCOUNT]);
}
// Gather other basic metrics and a view of the raw DXT data.
var width = header[DDS_HEADER_WIDTH];
var height = header[DDS_HEADER_HEIGHT];
var dataOffset = header[DDS_HEADER_SIZE] + 4;
var dxtData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the DXT information to the callback for uploading.
callback(dxtData, width, height, levels, internalFormat);
}
//==================//
// PVR File Reading //
//==================//
// Parse a PVR file and provide information about the raw texture data it contains to the given callback.
function parsePVR(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the PVR header.
var header = new Int32Array(arrayBuffer, 0, PVR_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid PVR file.
if(header[PVR_HEADER_MAGIC] != PVR_MAGIC) {
errorCallback("Invalid magic number in PVR header");
return 0;
}
// Determine what type of compressed data the file contains.
var format = header[PVR_HEADER_FORMAT];
var internalFormat;
switch(format) {
case PVR_FORMAT_2BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_2BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_ETC1:
internalFormat = COMPRESSED_RGB_ETC1_WEBGL;
break;
case PVR_FORMAT_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case PVR_FORMAT_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case PVR_FORMAT_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
default:
errorCallback("Unsupported PVR format: " + format);
return;
}
// Gather other basic metrics and a view of the raw texture data.
var width = header[PVR_HEADER_WIDTH];
var height = header[PVR_HEADER_HEIGHT];
var levels = header[PVR_HEADER_MIPMAPCOUNT];
var dataOffset = header[PVR_HEADER_METADATA] + 52;
var pvrtcData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the PVRTC information to the callback for uploading.
callback(pvrtcData, width, height, levels, internalFormat);
}
//=============//
// IMG loading //
//=============//
/*
This function provides a method for loading WebGL textures using a pool of
image elements, which has very low memory overhead. For more details see:
http://blog.tojicode.com/2012/03/javascript-memory-optimization-and.html
*/
var loadImgTexture = (function createTextureLoader() {
var MAX_CACHE_IMAGES = 16;
var textureImageCache = new Array(MAX_CACHE_IMAGES);
var cacheTop = 0;
var remainingCacheImages = MAX_CACHE_IMAGES;
var pendingTextureRequests = [];
var TextureImageLoader = function(loadedCallback) {
var self = this;
var blackPixel = new Uint8Array([0, 0, 0]);
this.gl = null;
this.texture = null;
this.callback = null;
this.image = new Image();
this.image.crossOrigin = 'anonymous';
this.image.addEventListener('load', function() {
var gl = self.gl;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
var startTime = Date.now();
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, self.image);
if (isPowerOfTwo(self.image.width) && isPowerOfTwo(self.image.height)) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
var uploadTime = Date.now() - startTime;
if(self.callback) {
var stats = {
width: self.image.width,
height: self.image.height,
internalFormat: gl.RGBA,
levelZeroSize: self.image.width * self.image.height * 4,
uploadTime: uploadTime
};
self.callback(self.texture, null, stats);
}
loadedCallback(self);
}, false);
this.image.addEventListener('error', function(ev) {
clearOnError(self.gl, 'Image could not be loaded: ' + self.image.src, self.texture, self.callback);
loadedCallback(self);
}, false);
};
TextureImageLoader.prototype.loadTexture = function(gl, src, texture, callback) {
this.gl = gl;
this.texture = texture;
this.callback = callback;
this.image.src = src;
};
var PendingTextureRequest = function(gl, src, texture, callback) {
this.gl = gl;
this.src = src;
this.texture = texture;
this.callback = callback;
};
function releaseTextureImageLoader(til) {
var req;
if(pendingTextureRequests.length) {
req = pendingTextureRequests.shift();
til.loadTexture(req.gl, req.src, req.texture, req.callback);
} else {
textureImageCache[cacheTop++] = til;
}
}
return function(gl, src, texture, callback) {
var til;
if(cacheTop) {
til = textureImageCache[--cacheTop];
til.loadTexture(gl, src, texture, callback);
} else if (remainingCacheImages) {
til = new TextureImageLoader(releaseTextureImageLoader);
til.loadTexture(gl, src, texture, callback);
--remainingCacheImages;
} else {
pendingTextureRequests.push(new PendingTextureRequest(gl, src, texture, callback));
}
return texture;
};
})();
//=====================//
// TextureLoader Class //
//=====================//
// This class is our public interface.
var TextureLoader = function(gl) {
this.gl = gl;
// Load the compression format extensions, if available
this.dxtExt = getExtension(gl, "WEBGL_compressed_texture_s3tc");
this.pvrtcExt = getExtension(gl, "WEBGL_compressed_texture_pvrtc");
this.atcExt = getExtension(gl, "WEBGL_compressed_texture_atc");
this.etc1Ext = getExtension(gl, "WEBGL_compressed_texture_etc1");
// Returns whether or not the compressed format is supported by the WebGL implementation
TextureLoader.prototype._formatSupported = function(format) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
return !!this.dxtExt;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return !!this.pvrtcExt;
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return !!this.atcExt;
case COMPRESSED_RGB_ETC1_WEBGL:
return !!this.etc1Ext;
default:
return false;
}
}
// Uploads compressed texture data to the GPU.
TextureLoader.prototype._uploadCompressedData = function(data, width, height, levels, internalFormat, texture, callback) {
var gl = this.gl;
gl.bindTexture(gl.TEXTURE_2D, texture);
var offset = 0;
var stats = {
width: width,
height: height,
internalFormat: internalFormat,
levelZeroSize: textureLevelSize(internalFormat, width, height),
uploadTime: 0
};
var startTime = Date.now();
// Loop through each mip level of compressed texture data provided and upload it to the given texture.
for (var i = 0; i < levels; ++i) {
// Determine how big this level of compressed texture data is in bytes.
var levelSize = textureLevelSize(internalFormat, width, height);
// Get a view of the bytes for this level of DXT data.
var dxtLevel = new Uint8Array(data.buffer, data.byteOffset + offset, levelSize);
// Upload!
gl.compressedTexImage2D(gl.TEXTURE_2D, i, internalFormat, width, height, 0, dxtLevel);
// The next mip level will be half the height and width of this one.
width = width >> 1;
height = height >> 1;
// Advance the offset into the compressed texture data past the current mip level's data.
offset += levelSize;
}
stats.uploadTime = Date.now() - startTime;
// We can't use gl.generateMipmaps with compressed textures, so only use
// mipmapped filtering if the compressed texture data contained mip levels.
if (levels > 1) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
// Notify the user that the texture is ready.
if (callback) { callback(texture, null, stats); }
}
TextureLoader.prototype.supportsDXT = function() {
return !!this.dxtExt;
}
TextureLoader.prototype.supportsPVRTC = function() {
return !!this.pvrtcExt;
}
TextureLoader.prototype.supportsATC = function() {
return !!this.atcExt;
}
TextureLoader.prototype.supportsETC1 = function() {
return !!this.etc1Ext;
}
// Loads an image file into the given texture.
// Supports any format that can be loaded into an img tag
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadIMG = function(src, texture, callback) {
if(!texture) {
texture = this.gl.createTexture();
}
loadImgTexture(this.gl, src, texture, callback);
return texture;
}
// Loads a DDS file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadDDS = function(src, texture, callback) {
var self = this;
if (!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parseDDS(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed DXT data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a PVR file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadPVR = function(src, texture, callback) {
var self = this;
if(!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parsePVR(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed PVR data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a texture from a file. Guesses the type based on extension.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadTexture = function(src, texture, callback) {
// Shamelessly lifted from StackOverflow :)
// http://stackoverflow.com/questions/680929
var re = /(?:\.([^.]+))?$/;
var ext = re.exec(src)[1] || '';
ext = ext.toLowerCase();
switch(ext) {
case 'dds':
return this.loadDDS(src, texture, callback);
case 'pvr':
return this.loadPVR(src, texture, callback);
default:
return this.loadIMG(src, texture, callback);
}
}
// Sets a texture to a solid RGBA color
// If no texture is provided one is created and returned.
TextureLoader.prototype.makeSolidColor = function(r, g, b, a, texture) {
var gl = this.gl;
var data = new Uint8Array([r, g, b, a]);
if(!texture) {
texture = gl.createTexture();
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
return texture;
}
}
return TextureLoader;
})();


@@ -0,0 +1,94 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Provides a simple way to get values from the query string if they're present
and use a default value if not. Not strictly a "WebGL" utility, but I use it
frequently enough for debugging that I wanted to include it here.
Example:
For the URL http://example.com/index.html?particleCount=1000
WGLUUrl.getInt("particleCount", 100); // URL overrides, returns 1000
WGLUUrl.getInt("particleSize", 10); // Not in URL, returns default of 10
*/
var WGLUUrl = (function() {
"use strict";
var urlArgs = null;
function ensureArgsCached() {
if (!urlArgs) {
urlArgs = {};
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split("=");
urlArgs[pair[0].toLowerCase()] = decodeURIComponent(pair[1]); // decodeURIComponent replaces the deprecated unescape()
}
}
}
function getString(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return urlArgs[lcaseName];
}
return defaultValue;
}
function getInt(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10);
}
return defaultValue;
}
function getFloat(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseFloat(urlArgs[lcaseName]);
}
return defaultValue;
}
function getBool(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10) != 0;
}
return defaultValue;
}
return {
getString: getString,
getInt: getInt,
getFloat: getFloat,
getBool: getBool
};
})();

View file

@@ -0,0 +1,284 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function (VRAudioPanner) {
'use strict';
// Default settings for panning. Cone parameters are experimentally
// determined.
var _PANNING_MODEL = 'HRTF';
var _DISTANCE_MODEL = 'inverse';
var _CONE_INNER_ANGLE = 60;
var _CONE_OUTER_ANGLE = 120;
var _CONE_OUTER_GAIN = 0.25;
// Super-simple web audio version detection.
var _LEGACY_WEBAUDIO = window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext');
if (_LEGACY_WEBAUDIO)
console.log('[VRAudioPanner] outdated version of Web Audio API detected.');
// Master audio context.
var _context = _LEGACY_WEBAUDIO ? new webkitAudioContext() : new AudioContext();
/**
* A buffer source player with HRTF panning for testing purposes.
* @param {Object} options Default options.
* @param {Number} options.gain Sound object gain. (0.0~1.0)
* @param {AudioBuffer} options.buffer AudioBuffer to play.
* @param {Number} options.detune Detune parameter. (cent)
* @param {Array} options.position x, y, z position in an array.
* @param {Array} options.orientation x, y, z orientation in an array.
*/
function TestSource (options) {
this._src = _context.createBufferSource();
this._out = _context.createGain();
this._panner = _context.createPanner();
this._analyser = _context.createAnalyser();
this._src.connect(this._out);
this._out.connect(this._analyser);
this._analyser.connect(this._panner);
this._panner.connect(_context.destination);
this._src.buffer = options.buffer;
this._src.loop = true;
this._out.gain.value = options.gain;
this._analyser.fftSize = 1024;
this._analyser.smoothingTimeConstant = 0.85;
this._lastRMSdB = 0.0;
this._panner.panningModel = _PANNING_MODEL;
this._panner.distanceModel = _DISTANCE_MODEL;
this._panner.coneInnerAngle = _CONE_INNER_ANGLE;
this._panner.coneOuterAngle = _CONE_OUTER_ANGLE;
this._panner.coneOuterGain = _CONE_OUTER_GAIN;
this._position = [0, 0, 0];
this._orientation = [1, 0, 0];
this._analyserBuffer = new Uint8Array(this._analyser.fftSize);
if (!_LEGACY_WEBAUDIO) {
this._src.detune.value = (options.detune || 0);
this._analyserBuffer = new Float32Array(this._analyser.fftSize);
}
this.setPosition(options.position);
this.setOrientation(options.orientation);
};
TestSource.prototype.start = function () {
this._src.start(0);
};
TestSource.prototype.stop = function () {
this._src.stop(0);
};
TestSource.prototype.getPosition = function () {
return this._position;
};
TestSource.prototype.setPosition = function (position) {
if (position) {
this._position[0] = position[0];
this._position[1] = position[1];
this._position[2] = position[2];
}
this._panner.setPosition.apply(this._panner, this._position);
};
TestSource.prototype.getOrientation = function () {
return this._orientation;
};
TestSource.prototype.setOrientation = function (orientation) {
if (orientation) {
this._orientation[0] = orientation[0];
this._orientation[1] = orientation[1];
this._orientation[2] = orientation[2];
}
this._panner.setOrientation.apply(this._panner, this._orientation);
};
TestSource.prototype.getCubeScale = function () {
// Safari does not support getFloatTimeDomainData(), so fall back to a
// naive spectral energy sum. This is relatively expensive.
if (_LEGACY_WEBAUDIO) {
this._analyser.getByteFrequencyData(this._analyserBuffer);
for (var k = 0, total = 0; k < this._analyserBuffer.length; ++k)
total += this._analyserBuffer[k];
total /= this._analyserBuffer.length;
return (total / 256.0) * 1.5;
}
this._analyser.getFloatTimeDomainData(this._analyserBuffer);
for (var i = 0, sum = 0; i < this._analyserBuffer.length; ++i)
sum += this._analyserBuffer[i] * this._analyserBuffer[i];
// Calculate RMS and convert it to dB (10 * log10(rms), offset by +30) for perceptual loudness.
var rms = Math.sqrt(sum / this._analyserBuffer.length);
var db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
// Moving average with an alpha of 0.525. Experimentally determined.
this._lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - this._lastRMSdB);
// Scaling by 1/30 is also experimentally determined.
return this._lastRMSdB / 30.0;
};
// Internal helper: load a file into a buffer. (github.com/hoch/spiral)
function _loadAudioFile(context, fileInfo, done) {
var xhr = new XMLHttpRequest();
xhr.open('GET', fileInfo.url);
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
if (xhr.status === 200) {
context.decodeAudioData(xhr.response,
function (buffer) {
console.log('[VRAudioPanner] File loaded: ' + fileInfo.url);
done(fileInfo.name, buffer);
},
function (message) {
console.log('[VRAudioPanner] Decoding failure: ' + fileInfo.url + ' (' + message + ')');
done(fileInfo.name, null);
});
} else {
console.log('[VRAudioPanner] XHR Error: ' + fileInfo.url + ' (' + xhr.statusText + ')');
done(fileInfo.name, null);
}
};
xhr.onerror = function (event) {
console.log('[VRAudioPanner] XHR Network failure: ' + fileInfo.url);
done(fileInfo.name, null);
};
xhr.send();
}
/**
* A wrapper/container class for multiple file loaders.
* @param {Object} context AudioContext
* @param {Object} audioFileData Audio file info in the format of {name, url}
* @param {Function} resolve Resolution handler for promise.
* @param {Function} reject Rejection handler for promise.
* @param {Function} progress Progress event handler.
*/
function AudioBufferManager(context, audioFileData, resolve, reject, progress) {
this._context = context;
this._resolve = resolve;
this._reject = reject;
this._progress = progress;
this._buffers = new Map();
this._loadingTasks = {};
// Iterating file loading.
for (var i = 0; i < audioFileData.length; i++) {
var fileInfo = audioFileData[i];
// Check for duplicate filenames and bail out if one is found.
if (this._loadingTasks.hasOwnProperty(fileInfo.name)) {
console.log('[VRAudioPanner] Duplicated filename in AudioBufferManager: ' + fileInfo.name);
return;
}
// Mark it as pending (0)
this._loadingTasks[fileInfo.name] = 0;
_loadAudioFile(this._context, fileInfo, this._done.bind(this));
}
}
AudioBufferManager.prototype._done = function (filename, buffer) {
// Label the loading task.
this._loadingTasks[filename] = buffer !== null ? 'loaded' : 'failed';
// A failed task will be a null buffer.
this._buffers.set(filename, buffer);
this._updateProgress(filename);
};
AudioBufferManager.prototype._updateProgress = function (filename) {
var numberOfFinishedTasks = 0, numberOfFailedTask = 0;
var numberOfTasks = 0;
for (var task in this._loadingTasks) {
numberOfTasks++;
if (this._loadingTasks[task] === 'loaded')
numberOfFinishedTasks++;
else if (this._loadingTasks[task] === 'failed')
numberOfFailedTask++;
}
if (typeof this._progress === 'function')
this._progress(filename, numberOfFinishedTasks, numberOfTasks);
if (numberOfFinishedTasks === numberOfTasks)
this._resolve(this._buffers);
if (numberOfFinishedTasks + numberOfFailedTask === numberOfTasks)
this._reject(this._buffers);
};
/**
* Returns true if the web audio implementation is outdated.
* @return {Boolean}
*/
VRAudioPanner.isWebAudioOutdated = function () {
return _LEGACY_WEBAUDIO;
};
/**
* Static method for updating listener's position.
* @param {Array} position Listener position in x, y, z.
*/
VRAudioPanner.setListenerPosition = function (position) {
_context.listener.setPosition.apply(_context.listener, position);
};
/**
* Static method for updating listener's orientation.
* @param {Array} orientation Listener orientation in x, y, z.
* @param {Array} upvector Listener's up vector in x, y, z.
*/
VRAudioPanner.setListenerOrientation = function (orientation, upvector) {
_context.listener.setOrientation(
orientation[0], orientation[1], orientation[2],
upvector[0], upvector[1], upvector[2]);
};
/**
* Load audio files asynchronously.
* @param {Array} dataModel Array of audio file info in the format of {name, url}.
* @param {Function} onprogress Callback function for reporting the progress.
* @return {Promise} Promise.
*/
VRAudioPanner.loadAudioFiles = function (dataModel, onprogress) {
return new Promise(function (resolve, reject) {
new AudioBufferManager(_context, dataModel, resolve, reject, onprogress);
});
};
/**
* Create a source player. See TestSource class for parameter description.
* @return {TestSource}
*/
VRAudioPanner.createTestSource = function (options) {
return new TestSource(options);
};
})(window.VRAudioPanner = {});
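// Example usage (illustrative sketch; the file name and URL below are placeholders):
//
//   VRAudioPanner.loadAudioFiles([{ name: 'drums', url: 'media/sound/drums.wav' }])
//     .then(function (buffers) {
//       var source = VRAudioPanner.createTestSource({
//         buffer: buffers.get('drums'),
//         gain: 0.8,
//         position: [0, 0, -1],
//         orientation: [0, 0, 1]
//       });
//       source.start();
//     });
//
//   // Each frame, keep the listener in sync with the headset pose:
//   VRAudioPanner.setListenerPosition([0, 0, 0]);
//   VRAudioPanner.setListenerOrientation([0, 0, -1], [0, 1, 0]);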

View file

@@ -0,0 +1,210 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
/*
Like CubeSea, but designed around a user's physical space: one central platform
that maps to the user's play area and several floating cubes that sit just
outside those boundaries (just to add visual interest).
*/
window.VRCubeIsland = (function () {
"use strict";
var cubeIslandVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeIslandFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeIsland = function (gl, texture, width, depth) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeIslandVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeIslandFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
this.vertBuffer = gl.createBuffer();
this.indexBuffer = gl.createBuffer();
this.resize(width, depth);
};
CubeIsland.prototype.resize = function (width, depth) {
var gl = this.gl;
this.width = width;
this.depth = depth;
var cubeVerts = [];
var cubeIndices = [];
// Build a single box.
function appendBox (left, bottom, back, right, top, front) {
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 1.0, 1.0);
cubeVerts.push(left, top, front, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 0.0, 1.0);
cubeVerts.push(left, top, back, 0.0, 0.0);
cubeVerts.push(left, top, front, 1.0, 0.0);
cubeVerts.push(left, bottom, front, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(right, bottom, back, 1.0, 1.0);
cubeVerts.push(right, top, back, 1.0, 0.0);
cubeVerts.push(right, top, front, 0.0, 0.0);
cubeVerts.push(right, bottom, front, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 1.0, 1.0);
cubeVerts.push(right, bottom, back, 0.0, 1.0);
cubeVerts.push(right, top, back, 0.0, 0.0);
cubeVerts.push(left, top, back, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, front, 0.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 1.0);
cubeVerts.push(right, top, front, 1.0, 0.0);
cubeVerts.push(left, top, front, 0.0, 0.0);
}
// Appends a cube with the given centerpoint and size.
function appendCube (x, y, z, size) {
var halfSize = size * 0.5;
appendBox(x - halfSize, y - halfSize, z - halfSize,
x + halfSize, y + halfSize, z + halfSize);
}
// Main "island", covers where the user can safely stand. Top of the cube
// (the ground the user stands on) should be at Y=0 to align with the user's
// floor. X=0 and Z=0 should be at the center of the user's play space.
appendBox(-width * 0.5, -width, -depth * 0.5, width * 0.5, 0, depth * 0.5);
// A sprinkling of other cubes to make things more visually interesting.
appendCube(1.1, 0.3, (-depth * 0.5) - 0.8, 0.5);
appendCube(-0.5, 1.0, (-depth * 0.5) - 0.9, 0.75);
appendCube(0.6, 1.5, (-depth * 0.5) - 0.6, 0.4);
appendCube(-1.0, 0.5, (-depth * 0.5) - 0.5, 0.2);
appendCube((-width * 0.5) - 0.8, 0.3, -1.1, 0.5);
appendCube((-width * 0.5) - 0.9, 1.0, 0.5, 0.75);
appendCube((-width * 0.5) - 0.6, 1.5, -0.6, 0.4);
appendCube((-width * 0.5) - 0.5, 0.5, 1.0, 0.2);
appendCube((width * 0.5) + 0.8, 0.3, 1.1, 0.5);
appendCube((width * 0.5) + 0.9, 1.0, -0.5, 0.75);
appendCube((width * 0.5) + 0.6, 1.5, 0.6, 0.4);
appendCube((width * 0.5) + 0.5, 0.5, -1.0, 0.2);
appendCube(1.1, 1.4, (depth * 0.5) + 0.8, 0.5);
appendCube(-0.5, 1.0, (depth * 0.5) + 0.9, 0.75);
appendCube(0.6, 0.4, (depth * 0.5) + 0.6, 0.4);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
CubeIsland.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, 1.5, -this.depth * 0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.5, 0.5, 0.5]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeIsland;
})();
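// Example usage (illustrative sketch; `gl`, `texture` and the matrices are assumed
// to come from the surrounding sample):
//
//   var island = new VRCubeIsland(gl, texture, 2, 2); // 2m x 2m play area
//   // Per eye, with projection/view matrices from the VR display:
//   island.render(projectionMat, viewMat, stats /* optional object with a render() method */);
//
//   // If the reported stage size changes, rebuild the geometry:
//   island.resize(stageWidth, stageDepth);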

View file

@@ -0,0 +1,188 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRCubeSea = (function () {
"use strict";
var cubeSeaVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeSeaFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeSea = function (gl, texture) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeSeaVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeSeaFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var cubeVerts = [];
var cubeIndices = [];
// Build a single cube.
function appendCube (x, y, z) {
if (!x && !y && !z) {
// Don't create a cube in the center.
return;
}
var size = 0.2;
// Bottom
var idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 0.0, 0.0);
// Top
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 1.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 1.0);
// Left
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 1.0, 1.0);
// Right
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 0.0, 0.0);
cubeVerts.push(x + size, y - size, z + size, 0.0, 1.0);
// Back
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 1.0, 1.0);
cubeVerts.push(x + size, y - size, z - size, 0.0, 1.0);
cubeVerts.push(x + size, y + size, z - size, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z - size, 1.0, 0.0);
// Front
idx = cubeVerts.length / 5.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z + size, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 1.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 0.0);
}
var gridSize = 10;
// Build the cube sea
for (var x = 0; x < gridSize; ++x) {
for (var y = 0; y < gridSize; ++y) {
for (var z = 0; z < gridSize; ++z) {
appendCube(x - (gridSize / 2), y - (gridSize / 2), z - (gridSize / 2));
}
}
}
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
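// Debug leftover: "mortimer" holds a hard-coded matrix and is only touched by the
// commented-out mat4.invert() call in render() below; it has no effect as shipped.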
var mortimer = mat4.create();
var a = [0.9868122935295105, -0.03754837438464165, -0.15745431184768677, 0, 0.011360996402800083, 0.9863911271095276, -0.1640235036611557, 0, 0.16147033870220184, 0.16007155179977417, 0.9738093614578247, 0, 0.192538782954216, 0.024526841938495636, -0.001076754298992455, 1.0000001192092896];
for (var i = 0; i < 16; ++i) {
mortimer[i] = a[i];
}
CubeSea.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
//mat4.invert(mortimer, modelViewMat);
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, -0.3, -0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.3, 0.3, 0.3]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeSea;
})();
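// Example usage (illustrative sketch; `gl`, `texture` and the matrices are assumed
// to come from the surrounding sample):
//
//   var cubeSea = new VRCubeSea(gl, texture);
//   // Per eye, with matrices from the VR display's frame data:
//   cubeSea.render(projectionMat, viewMat, stats /* optional, may be null */);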

View file

@@ -0,0 +1,219 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRPanorama = (function () {
"use strict";
var panoVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var panoFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var Panorama = function (gl) {
this.gl = gl;
this.texture = gl.createTexture();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(panoVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(panoFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var panoVerts = [];
var panoIndices = [];
var radius = 2; // 2 meter radius sphere
var latSegments = 40;
var lonSegments = 40;
// Create the vertices
for (var i=0; i <= latSegments; ++i) {
var theta = i * Math.PI / latSegments;
var sinTheta = Math.sin(theta);
var cosTheta = Math.cos(theta);
for (var j=0; j <= lonSegments; ++j) {
var phi = j * 2 * Math.PI / lonSegments;
var sinPhi = Math.sin(phi);
var cosPhi = Math.cos(phi);
var x = sinPhi * sinTheta;
var y = cosTheta;
var z = -cosPhi * sinTheta;
var u = (j / lonSegments);
var v = (i / latSegments);
panoVerts.push(x * radius, y * radius, z * radius, u, v);
}
}
// Create the indices
for (var i = 0; i < latSegments; ++i) {
var offset0 = i * (lonSegments+1);
var offset1 = (i+1) * (lonSegments+1);
for (var j = 0; j < lonSegments; ++j) {
var index0 = offset0+j;
var index1 = offset1+j;
panoIndices.push(
index0, index1, index0+1,
index1, index1+1, index0+1
);
}
}
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(panoVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(panoIndices), gl.STATIC_DRAW);
this.indexCount = panoIndices.length;
this.imgElement = null;
this.videoElement = null;
};
Panorama.prototype.setImage = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var img = new Image();
img.addEventListener('load', function() {
self.imgElement = img;
self.videoElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, img);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.imgElement);
});
img.addEventListener('error', function(ev) {
console.error(ev.message);
reject(ev.message);
}, false);
img.crossOrigin = 'anonymous';
img.src = url;
});
};
Panorama.prototype.setVideo = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var video = document.createElement('video');
video.addEventListener('canplay', function() {
// Added "click to play" UI?
});
video.addEventListener('playing', function() {
self.videoElement = video;
self.imgElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, self.videoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.videoElement);
});
video.addEventListener('error', function(ev) {
console.error(video.error);
reject(video.error);
}, false);
video.loop = true;
video.autoplay = true;
video.crossOrigin = 'anonymous';
video.setAttribute('webkit-playsinline', '');
video.src = url;
});
};
Panorama.prototype.play = function() {
if (this.videoElement)
this.videoElement.play();
};
Panorama.prototype.pause = function() {
if (this.videoElement)
this.videoElement.pause();
};
Panorama.prototype.isPaused = function() {
if (this.videoElement)
return this.videoElement.paused;
return false;
};
Panorama.prototype.render = function (projectionMat, modelViewMat) {
var gl = this.gl;
var program = this.program;
if (!this.imgElement && !this.videoElement)
return;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (this.videoElement && !this.videoElement.paused) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.videoElement);
}
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
};
return Panorama;
})();
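// Example usage (illustrative sketch; the media URLs are placeholders):
//
//   var panorama = new VRPanorama(gl);
//   panorama.setImage('media/textures/pano.jpg').then(function () {
//     // Per eye, once the equirectangular image has been uploaded:
//     panorama.render(projectionMat, viewMat);
//   });
//
//   // Or stream a 360 video; render() re-uploads the current frame while it plays:
//   // panorama.setVideo('media/video/pano.webm');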

View file

@@ -0,0 +1,181 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
window.VRSamplesUtil = (function () {
"use strict";
// Lifted from the WebVR Polyfill
function isMobile () {
return /Android/i.test(navigator.userAgent) ||
/iPhone|iPad|iPod/i.test(navigator.userAgent);
}
function getMessageContainer () {
var messageContainer = document.getElementById("vr-sample-message-container");
if (!messageContainer) {
messageContainer = document.createElement("div");
messageContainer.id = "vr-sample-message-container";
messageContainer.style.fontFamily = "sans-serif";
messageContainer.style.position = "absolute";
messageContainer.style.zIndex = "999";
messageContainer.style.left = "0";
messageContainer.style.top = "0";
messageContainer.style.right = "0";
messageContainer.style.margin = "0";
messageContainer.style.padding = "0";
messageContainer.align = "center";
document.body.appendChild(messageContainer);
}
return messageContainer;
}
function addMessageElement (message, backgroundColor) {
var messageElement = document.createElement("div");
messageElement.classList.add("vr-sample-message");
messageElement.style.color = "#FFF";
messageElement.style.backgroundColor = backgroundColor;
messageElement.style.borderRadius = "3px";
messageElement.style.position = "relative";
messageElement.style.display = "inline-block";
messageElement.style.margin = "0.5em";
messageElement.style.padding = "0.75em";
messageElement.innerHTML = message;
getMessageContainer().appendChild(messageElement);
return messageElement;
}
// Makes the given element fade out and remove itself from the DOM after the
// given timeout.
function makeToast (element, timeout) {
element.style.transition = "opacity 0.5s ease-in-out";
element.style.opacity = "1";
setTimeout(function () {
element.style.opacity = "0";
setTimeout(function () {
if (element.parentElement)
element.parentElement.removeChild(element);
}, 500);
}, timeout);
}
function addError (message, timeout) {
var element = addMessageElement("<b>ERROR:</b> " + message, "#D33");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function addInfo (message, timeout) {
var element = addMessageElement(message, "#22A");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function getButtonContainer () {
var buttonContainer = document.getElementById("vr-sample-button-container");
if (!buttonContainer) {
buttonContainer = document.createElement("div");
buttonContainer.id = "vr-sample-button-container";
buttonContainer.style.fontFamily = "sans-serif";
buttonContainer.style.position = "absolute";
buttonContainer.style.zIndex = "999";
buttonContainer.style.left = "0";
buttonContainer.style.bottom = "0";
buttonContainer.style.right = "0";
buttonContainer.style.margin = "0";
buttonContainer.style.padding = "0";
buttonContainer.align = "right";
document.body.appendChild(buttonContainer);
}
return buttonContainer;
}
function addButtonElement (message, key, icon) {
var buttonElement = document.createElement("div");
buttonElement.classList.add("vr-sample-button");
buttonElement.style.color = "#FFF";
buttonElement.style.fontWeight = "bold";
buttonElement.style.backgroundColor = "#888";
buttonElement.style.borderRadius = "5px";
buttonElement.style.border = "3px solid #555";
buttonElement.style.position = "relative";
buttonElement.style.display = "inline-block";
buttonElement.style.margin = "0.5em";
buttonElement.style.padding = "0.75em";
buttonElement.style.cursor = "pointer";
buttonElement.align = "center";
if (icon) {
buttonElement.innerHTML = "<img src='" + icon + "'/><br/>" + message;
} else {
buttonElement.innerHTML = message;
}
if (key) {
var keyElement = document.createElement("span");
keyElement.classList.add("vr-sample-button-accelerator");
keyElement.style.fontSize = "0.75em";
keyElement.style.fontStyle = "italic";
keyElement.innerHTML = " (" + key + ")";
buttonElement.appendChild(keyElement);
}
getButtonContainer().appendChild(buttonElement);
return buttonElement;
}
function addButton (message, key, icon, callback) {
var keyListener = null;
if (key) {
var keyCode = key.charCodeAt(0);
keyListener = function (event) {
if (event.keyCode === keyCode) {
callback(event);
}
};
document.addEventListener("keydown", keyListener, false);
}
var element = addButtonElement(message, key, icon);
element.addEventListener("click", function (event) {
callback(event);
event.preventDefault();
}, false);
return {
element: element,
keyListener: keyListener
};
}
function removeButton (button) {
if (!button)
return;
if (button.element.parentElement)
button.element.parentElement.removeChild(button.element);
if (button.keyListener)
document.removeEventListener("keydown", button.keyListener, false);
}
return {
isMobile: isMobile,
addError: addError,
addInfo: addInfo,
addButton: addButton,
removeButton: removeButton,
makeToast: makeToast
};
})();
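// Example usage (illustrative sketch):
//
//   if (VRSamplesUtil.isMobile())
//     VRSamplesUtil.addInfo("Tap the button below to enter VR.", 3000);
//
//   var vrButton = VRSamplesUtil.addButton("Enter VR", "E", null, function () {
//     // Request presentation on the VRDisplay here.
//   });
//
//   // Later, e.g. once presenting has started:
//   VRSamplesUtil.removeButton(vrButton);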