Allocate the off-screen BitMap as a friend of the screen BitMap in the diminishing number of cases where this works correctly. This should stop NetSurf using memory on the wrong graphics card when two are installed.
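
For context, a minimal sketch of what allocating "as a friend" means on AmigaOS: passing the screen's BitMap as the friend_bitmap argument of graphics.library's AllocBitMap() requests a BitMap with the same format, and normally in the same video RAM, as the screen itself. The helper name alloc_offscreen and its arguments below are illustrative only, not NetSurf's actual code.

#include <proto/graphics.h>
#include <graphics/gfx.h>
#include <intuition/screens.h>

/* Hypothetical helper (not NetSurf code): allocate an off-screen BitMap
 * matching the screen's depth.  Passing the screen BitMap as the friend
 * lets graphics.library place the new BitMap on the same graphics card
 * as the screen; passing NULL would allocate a plain, unattached BitMap. */
static struct BitMap *alloc_offscreen(struct Screen *scrn, ULONG width, ULONG height)
{
	ULONG depth = GetBitMapAttr(scrn->RastPort.BitMap, BMA_DEPTH);

	return AllocBitMap(width, height, depth,
			BMF_MINPLANES | BMF_DISPLAYABLE,
			scrn->RastPort.BitMap);
}

The returned BitMap would be released with FreeBitMap() when no longer needed.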

Chris Young 2013-04-28 12:21:39 +01:00
parent e4f11d1b74
commit 8c7eea4b62
1 changed file with 9 additions and 2 deletions


@@ -156,14 +156,21 @@ void ami_init_layers(struct gui_globals *gg, ULONG width, ULONG height)
 	ULONG depth = 32;
 	struct DrawInfo *dri;
-	struct BitMap *friend = NULL; /* Required to be NULL for Cairo and ARGB bitmaps */
+	struct BitMap *friend = NULL;
 
 	depth = GetBitMapAttr(scrn->RastPort.BitMap, BMA_DEPTH);
 
 	if((depth < 16) || (nsoption_int(cairo_renderer) == -1)) {
 		palette_mapped = true;
-//		friend = scrn->RastPort.BitMap;
 	} else {
 		palette_mapped = false;
+
+		/* If we're not palette-mapping, allocate using a friend BitMap if the
+		 * depth is 32bpp. In all other cases using a friend BitMap causes a
+		 * hard lockup or odd/missing graphical effects.
+		 */
+
+		if(depth == 32)
+			friend = scrn->RastPort.BitMap;
 	}
 
 	if(nsoption_int(redraw_tile_size_x) <= 0) nsoption_set_int(redraw_tile_size_x, scrn->Width);