diff --git a/src/glsl/glsl_parser_extras.cpp b/src/glsl/glsl_parser_extras.cpp
index 0b53232e9e7..7f8d47ce9bc 100644
--- a/src/glsl/glsl_parser_extras.cpp
+++ b/src/glsl/glsl_parser_extras.cpp
@@ -114,6 +114,9 @@ _mesa_glsl_parse_state::_mesa_glsl_parse_state(struct gl_context *ctx,
    }
 
    this->supported_version_string = supported;
+
+   if (ctx->Const.ForceGLSLExtensionsWarn)
+      _mesa_glsl_process_extension("all", NULL, "warn", NULL, this);
 }
 
 const char *
diff --git a/src/mesa/main/mtypes.h b/src/mesa/main/mtypes.h
index bce5de23823..b86aeb6c1f1 100644
--- a/src/mesa/main/mtypes.h
+++ b/src/mesa/main/mtypes.h
@@ -2772,6 +2772,12 @@ struct gl_constants
    GLuint GLSLVersion;   /**< GLSL version supported (ex: 120 = 1.20) */
 
+   /**
+    * Changes default GLSL extension behavior from "error" to "warn". It's out
+    * of spec, but it can make some apps work that otherwise wouldn't.
+    */
+   GLboolean ForceGLSLExtensionsWarn;
+
    /**
    * Does the driver support real 32-bit integers? (Otherwise, integers are
    * simulated via floats.)
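
Note on usage: the parser state constructor above only reads the new flag; something in driver or context setup still has to set it. Below is a minimal sketch of one way a driver could expose it, assuming Mesa's gl_context/gl_constants layout from mtypes.h. The helper function and the FORCE_GLSL_EXTENSIONS_WARN environment variable are hypothetical illustrations, not part of this patch.

```c
#include <stdlib.h>
#include <string.h>

#include "main/mtypes.h"

/* Hypothetical helper: turn on the out-of-spec "warn" behavior when an
 * (illustrative) environment variable is set.  Would be called during
 * context/constants initialization. */
static void
example_set_glsl_extension_warn(struct gl_context *ctx)
{
   const char *env = getenv("FORCE_GLSL_EXTENSIONS_WARN");

   /* Default stays GL_FALSE, i.e. the spec-mandated "error" behavior. */
   ctx->Const.ForceGLSLExtensionsWarn = GL_FALSE;

   if (env != NULL && strcmp(env, "1") == 0)
      ctx->Const.ForceGLSLExtensionsWarn = GL_TRUE;
}
```

With the flag set, the constructor injects the equivalent of "#extension all : warn" into every shader, so shaders that rely on extension features without declaring the matching #extension directive compile with a warning instead of failing outright.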