console: Fix rendering of VGA underline

vga_putcharxy()'s underline code sets font_data to 0xffff instead of
0xff.  vga_putcharxy() then reads dmask16[0xffff >> 4] and
dmask4[0xffff >> 6].  In practice, these out-of-bounds subscripts
"only" put a few crap bits into the display surface.

For 32 bit pixels, there's no array access.  font_data's extra bits go
straight into the display surface.
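
A rough sketch of why, using the same pixel expression as the 32 bpp
hunk below (xorcol and bgcol are made-up example colors): -(bit) turns
one font bit into an all-ones or all-zero mask, but with font_data =
0xFFFF the unmasked term font_data >> 7 is 0x1FF, so its negation is
neither 0 nor ~0 and the extra bits end up in the pixel value:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t xorcol = 0x00FFFFFF, bgcol = 0;   /* arbitrary example colors */

        /* Buggy value: 0xFFFF >> 7 is 0x1FF, its negation is not a clean mask. */
        uint32_t bad = ((uint32_t)-(0xFFFF >> 7) & xorcol) ^ bgcol;
        /* Fixed value: 0xFF >> 7 is 1, and -(1) is the all-ones mask. */
        uint32_t good = ((uint32_t)-(0xFF >> 7) & xorcol) ^ bgcol;

        printf("bad pixel  = 0x%08" PRIx32 "\n", bad);    /* 0x00fffe01 */
        printf("good pixel = 0x%08" PRIx32 "\n", good);   /* 0x00ffffff */
        return 0;
    }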

Broken when commit 6d6f7c28 implemented underline.

Spotted by Coverity.

Signed-off-by: Markus Armbruster <armbru@redhat.com>
Signed-off-by: Anthony Liguori <aliguori@us.ibm.com>
Author:    Markus Armbruster <armbru@redhat.com>
Date:      2011-11-04 10:38:29 +01:00
Committer: Anthony Liguori <aliguori@us.ibm.com>
Parent:    f54c556c08
Commit:    439229c7cb

@@ -467,7 +467,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask16[(font_data >> 4)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask16[(font_data >> 0) & 0xf] & xorcol) ^ bgcol;
@@ -480,7 +480,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask4[(font_data >> 6)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask4[(font_data >> 4) & 3] & xorcol) ^ bgcol;
@@ -493,7 +493,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
         for(i = 0; i < FONT_HEIGHT; i++) {
             font_data = *font_ptr++;
             if (t_attrib->uline && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (-((font_data >> 7)) & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (-((font_data >> 6) & 1) & xorcol) ^ bgcol;