Fix extension querying when using WebGL2

This patch fixes a crash caused by using a deprecated GL call.
Starting with OpenGL 3 (the API generation WebGL2 is built on), the
`glGetString(GL_EXTENSIONS)` query is deprecated, and some drivers raise a
GL_INVALID_ENUM error instead of returning the extension list.
The extensions must instead be queried by first reading GL_NUM_EXTENSIONS
and then fetching each extension string one by one with `glGetStringi`.
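
For reference, the core-profile query sequence looks roughly like the sketch
below. It is illustrative only: it uses the raw `gl` crate bindings rather
than Servo's `gl::Gl` trait used in the patch, and it assumes a current GL
context whose function pointers have already been loaded (e.g. via
`gl::load_with`).

    use std::ffi::CStr;
    use std::os::raw::c_char;

    // Sketch of the non-deprecated extension query on a core profile context.
    unsafe fn query_extensions() -> String {
        // Ask the driver how many extensions it exposes...
        let mut count: gl::types::GLint = 0;
        gl::GetIntegerv(gl::NUM_EXTENSIONS, &mut count);

        // ...then fetch each extension string by index instead of calling
        // the deprecated glGetString(GL_EXTENSIONS).
        let mut extensions = Vec::with_capacity(count as usize);
        for idx in 0..count as u32 {
            let ptr = gl::GetStringi(gl::EXTENSIONS, idx) as *const c_char;
            extensions.push(CStr::from_ptr(ptr).to_string_lossy().into_owned());
        }

        // Joining with spaces reproduces the format of the old single-string
        // query, so consumers of the result need no changes.
        extensions.join(" ")
    }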
Mátyás Mustoha 2019-08-26 14:25:47 +02:00
parent 66e5ad0cb8
commit 736f6859b0

@@ -1721,8 +1721,19 @@ impl WebGLImpl {
         chan.send(result).unwrap();
     }
 
+    #[allow(unsafe_code)]
     fn get_extensions(gl: &dyn gl::Gl, chan: &WebGLSender<String>) {
-        chan.send(gl.get_string(gl::EXTENSIONS)).unwrap();
+        let mut ext_count = [0];
+        unsafe {
+            gl.get_integer_v(gl::NUM_EXTENSIONS, &mut ext_count);
+        }
+        let ext_count = ext_count[0] as usize;
+        let mut extensions = Vec::with_capacity(ext_count);
+        for idx in 0..ext_count {
+            extensions.push(gl.get_string_i(gl::EXTENSIONS, idx as u32))
+        }
+        let extensions = extensions.join(" ");
+        chan.send(extensions).unwrap();
     }
 
     // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6