renderer-gl: Fix wrong stride error when reading pixels

gl_renderer_do_read_pixels() expects the stride in bytes, not in pixels.

Signed-off-by: Jeffy Chen <jeffy.chen@rock-chips.com>
commit 623c7b5202
parent e74f2897b9
Author: Jeffy Chen <jeffy.chen@rock-chips.com>
Date:   2023-12-28 16:40:30 +08:00
1 changed file with 5 additions and 3 deletions


@@ -2007,7 +2007,8 @@ gl_renderer_repaint_output(struct weston_output *output,
 
 	if (rb->pixels) {
 		uint32_t *pixels = rb->pixels;
-		int stride = go->fb_size.width;
+		int width = go->fb_size.width;
+		int stride = width * (compositor->read_format->bpp >> 3);
 		pixman_box32_t *extents = &rb->base.damage.extents;
 		struct weston_geometry rect = {
 			.x = go->area.x,
@@ -2024,13 +2025,14 @@ gl_renderer_repaint_output(struct weston_output *output,
 		}
 
 		if (gr->gl_version >= gr_gl_version(3, 0) && !gr->fan_debug) {
-			glPixelStorei(GL_PACK_ROW_LENGTH, stride);
+			glPixelStorei(GL_PACK_ROW_LENGTH, width);
			rect.width = extents->x2 - extents->x1;
			rect.x += extents->x1 - (int)output->pos.c.x;
			pixels += extents->x1 - (int)output->pos.c.x;
 		}
 
-		gl_renderer_do_read_pixels(gr, compositor->read_format, pixels, stride, &rect);
+		gl_renderer_do_read_pixels(gr, compositor->read_format, pixels,
+					   stride, &rect);
 
 		if (gr->gl_version >= gr_gl_version(3, 0))
 			glPixelStorei(GL_PACK_ROW_LENGTH, 0);
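
For context, GL_PACK_ROW_LENGTH is specified in pixels, while the read-back path takes a row stride in bytes; the fix therefore keeps the pixel width for glPixelStorei() and multiplies it by the bytes per pixel for gl_renderer_do_read_pixels(). Below is a minimal standalone sketch of that conversion, not weston code: the fake_format struct, the row_stride_bytes() helper, and the 1920/XRGB8888 values are illustrative assumptions that only mirror the bpp field of compositor->read_format.

/* Standalone illustration of the pixel-vs-byte stride units in the fix.
 * Not weston code; fake_format only mimics the bpp field used above. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct fake_format {
	uint32_t bpp;		/* bits per pixel, e.g. 32 for XRGB8888 */
};

static int
row_stride_bytes(int width_px, const struct fake_format *fmt)
{
	/* Same math as the fix: width in pixels times bytes per pixel. */
	return width_px * (fmt->bpp >> 3);
}

int
main(void)
{
	struct fake_format xrgb8888 = { .bpp = 32 };	/* assumed format */
	int width = 1920;				/* assumed fb width in pixels */
	int stride = row_stride_bytes(width, &xrgb8888);

	/* 'width' is the unit GL_PACK_ROW_LENGTH wants (pixels);
	 * 'stride' is the unit the byte-oriented read-back helper wants. */
	assert(stride == 1920 * 4);
	printf("width = %d px, stride = %d bytes\n", width, stride);
	return 0;
}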