Fix curses on big endian hosts
On big endian hosts, the curses interface is unusable: the emulated graphic card only displays garbage, while the monitor interface displays nothing (or rather only spaces). The curses interface is waiting for data in native endianness, so console_write_ch() should not do any conversion. The conversion should be done when reading the video buffer in hw/vga.c. I supposed this buffer is in little endian mode, though it's not impossible that the data is actually in guest endianness. I currently have no big endian guest to test (they all switch to graphic mode immediately). Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>
This commit is contained in:
parent
8a7d0890ac
commit
9ae19b657e
@ -329,7 +329,7 @@ static inline void console_write_ch(console_ch_t *dest, uint32_t ch)
|
||||
{
|
||||
if (!(ch & 0xff))
|
||||
ch |= ' ';
|
||||
cpu_to_le32wu((uint32_t *) dest, ch);
|
||||
*dest = ch;
|
||||
}
|
||||
|
||||
typedef void (*vga_hw_update_ptr)(void *);
|
||||
|
6
hw/vga.c
6
hw/vga.c
@ -2073,14 +2073,14 @@ static void vga_update_text(void *opaque, console_ch_t *chardata)
|
||||
|
||||
if (full_update) {
|
||||
for (i = 0; i < size; src ++, dst ++, i ++)
|
||||
console_write_ch(dst, VMEM2CHTYPE(*src));
|
||||
console_write_ch(dst, VMEM2CHTYPE(le32_to_cpu(*src)));
|
||||
|
||||
dpy_update(s->ds, 0, 0, width, height);
|
||||
} else {
|
||||
c_max = 0;
|
||||
|
||||
for (i = 0; i < size; src ++, dst ++, i ++) {
|
||||
console_write_ch(&val, VMEM2CHTYPE(*src));
|
||||
console_write_ch(&val, VMEM2CHTYPE(le32_to_cpu(*src)));
|
||||
if (*dst != val) {
|
||||
*dst = val;
|
||||
c_max = i;
|
||||
@ -2089,7 +2089,7 @@ static void vga_update_text(void *opaque, console_ch_t *chardata)
|
||||
}
|
||||
c_min = i;
|
||||
for (; i < size; src ++, dst ++, i ++) {
|
||||
console_write_ch(&val, VMEM2CHTYPE(*src));
|
||||
console_write_ch(&val, VMEM2CHTYPE(le32_to_cpu(*src)));
|
||||
if (*dst != val) {
|
||||
*dst = val;
|
||||
c_max = i;
|
||||
|
Loading…
Reference in New Issue
Block a user