Properly check for invalid targets in gl.BindFramebuffer

It should always emit InvalidEnum, not InvalidOperation.
This commit is contained in:
Anthony Ramine 2018-04-05 17:39:53 +02:00
parent 044f19d914
commit 183cc4c035
2 changed files with 2 additions and 7 deletions

View file

@@ -11,7 +11,6 @@ use canvas_traits::webgl::DOMToTextureCommand;
use canvas_traits::webgl::WebGLError::*;
use canvas_traits::webgl::webgl_channel;
use dom::bindings::cell::DomRefCell;
-use dom::bindings::codegen::Bindings::WebGL2RenderingContextBinding::WebGL2RenderingContextConstants as WebGL2Constants;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::{self, WebGLContextAttributes};
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants as constants;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextMethods;
@@ -1576,12 +1575,8 @@ impl WebGLRenderingContextMethods for WebGLRenderingContext {
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6
fn BindFramebuffer(&self, target: u32, framebuffer: Option<&WebGLFramebuffer>) {
-        if target == WebGL2Constants::READ_FRAMEBUFFER {
-            return self.webgl_error(InvalidEnum);
-        }
         if target != constants::FRAMEBUFFER {
-            return self.webgl_error(InvalidOperation);
+            return self.webgl_error(InvalidEnum);
}
if let Some(framebuffer) = framebuffer {