gl-renderer: Set texture filters before setting input textures

It turns out gl_shader_config_set_input_textures() is what copies the filter
parameters into the sconf, so we must set the filters before calling it.

Signed-off-by: Derek Foreman <derek.foreman@collabora.com>

Derek Foreman, 2025-10-21 11:26:39 -05:00
parent 21f1c575b3
commit 37db081f82

@@ -1485,8 +1485,6 @@ gl_shader_config_init_for_paint_node(struct gl_shader_config *sconf,
 		weston_matrix_translate(&sconf->surface_to_buffer, 0, 1, 0);
 	}
 
-	gl_shader_config_set_input_textures(sconf, gb);
-
 	filter = pnode->needs_filtering ? GL_LINEAR : GL_NEAREST;
 	for (i = 0; i < gb->num_textures; i++) {
 		if (filter != gb->parameters[i].filters.min) {
@@ -1495,6 +1493,7 @@ gl_shader_config_init_for_paint_node(struct gl_shader_config *sconf,
 			gb->parameters[i].flags |= TEXTURE_FILTERS_DIRTY;
 		}
 	}
+	gl_shader_config_set_input_textures(sconf, gb);
 
 	if (!gl_shader_config_set_color_transform(gr, sconf, pnode->surf_xform.transform)) {
 		weston_log("GL-renderer: failed to generate a color transformation.\n");