Allow replacing the object cache with the guarded heap.

This makes it possible to use the debug features of the guarded heap also on
allocations made through the object cache API. This is obviously
horrible for performance and uses up huge amounts of memory, so the
initial and grow sizes are adjusted accordingly.

Note that this is a rather simple hack, using the object_cache pointer
to transport the allocation size. The alignment is neglected completely.
This commit is contained in:
Michael Lotz 2011-12-04 13:52:06 +01:00
parent 390a6e2e02
commit 5cbe06f482
4 changed files with 121 additions and 1 deletions

View File

@ -114,6 +114,10 @@
#define USE_SLAB_ALLOCATOR_FOR_MALLOC 1
// Heap implementation based on the slab allocator (for production use).
// Replace the object cache with the guarded heap to force debug features. Also
// requires the use of the guarded heap for malloc.
#define USE_GUARDED_HEAP_FOR_OBJECT_CACHE 0
// Enables additional sanity checks in the slab allocator's memory manager.
#define DEBUG_SLAB_MEMORY_MANAGER_PARANOID_CHECKS 0

View File

@ -13,6 +13,15 @@
#include "kernel_debug_config.h"
#if USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE
// This requires a lot of up-front memory to boot at all...
#define INITIAL_HEAP_SIZE 128 * 1024 * 1024
// ... and a lot of reserves to keep running.
#define HEAP_GROW_SIZE 128 * 1024 * 1024
#else // USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE
// allocate 16MB initial heap for the kernel
#define INITIAL_HEAP_SIZE 16 * 1024 * 1024
// grow by another 4MB each time the heap runs out of memory
@ -22,6 +31,8 @@
// use areas for allocations bigger than 1MB
#define HEAP_AREA_USE_THRESHOLD 1 * 1024 * 1024
#endif // !(USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE)
// allocation/deallocation flags for {malloc,free}_etc()
#define HEAP_DONT_WAIT_FOR_MEMORY 0x01

View File

@ -12,6 +12,7 @@
#include <debug.h>
#include <heap.h>
#include <malloc.h>
#include <slab/Slab.h>
#include <tracing.h>
#include <util/AutoLock.h>
#include <vm/vm.h>
@ -611,7 +612,7 @@ dump_guarded_heap_page(int argc, char** argv)
}
// #pragma mark - public API
// #pragma mark - Malloc API
status_t
@ -718,4 +719,101 @@ realloc(void* address, size_t newSize)
}
#if USE_GUARDED_HEAP_FOR_OBJECT_CACHE
// #pragma mark - Slab API
// No slab memory manager exists when the guarded heap replaces the object
// cache, so maintenance requests are a no-op.
void
request_memory_manager_maintenance()
{
}
// NOTE(hack): no real cache object is created. The object size is smuggled
// back to the caller as the object_cache pointer so object_cache_alloc() can
// recover it later. Name, alignment, cookie, constructor and destructor are
// ignored entirely (see commit note: alignment is neglected completely).
object_cache*
create_object_cache(const char*, size_t objectSize, size_t, void*,
	object_cache_constructor, object_cache_destructor)
{
	// The returned "pointer" is really the allocation size; it must never
	// be dereferenced.
	return (object_cache*)objectSize;
}
// Same hack as create_object_cache(): the object size is returned disguised
// as the object_cache pointer; all other parameters (alignment, magazine
// capacities, flags, reclaimer, ...) are ignored.
object_cache*
create_object_cache_etc(const char*, size_t objectSize, size_t, size_t, size_t,
	size_t, uint32, void*, object_cache_constructor, object_cache_destructor,
	object_cache_reclaimer)
{
	// "Pointer" encodes the allocation size only; never dereferenced.
	return (object_cache*)objectSize;
}
// No-op: create_object_cache() allocated nothing in guarded heap mode, so
// there is nothing to tear down. Individual objects are freed via
// object_cache_free().
void
delete_object_cache(object_cache* cache)
{
}
// No-op: the guarded heap does not pre-reserve objects. Always reports
// success so callers relying on a reserve still proceed.
status_t
object_cache_set_minimum_reserve(object_cache* cache, size_t objectCount)
{
	return B_OK;
}
// Allocates straight from the guarded heap. The cache "pointer" actually
// carries the object size (see create_object_cache()), hence the cast back
// to size_t. An alignment of 0 requests the heap's default alignment —
// any alignment the cache was created with is ignored.
void*
object_cache_alloc(object_cache* cache, uint32 flags)
{
	return memalign_etc(0, (size_t)cache, flags);
}
// Frees an object previously returned by object_cache_alloc(). The cache
// argument is unused: it only encodes the allocation size, which the
// guarded heap tracks itself.
void
object_cache_free(object_cache* cache, void* object, uint32 flags)
{
	// Don't `return` the call's (void) result: returning an expression
	// from a void function is a constraint violation in standard C and
	// inconsistent with the other void stubs in this block.
	free_etc(object, flags);
}
// No-op: the guarded heap cannot pre-populate a cache, so reservation
// requests succeed trivially.
status_t
object_cache_reserve(object_cache* cache, size_t objectCount, uint32 flags)
{
	return B_OK;
}
// Per-cache usage is not tracked in guarded heap mode; always reports zero.
// NOTE(review): assumes _allocatedMemory is non-NULL, as the real slab
// implementation presumably does too — confirm against callers.
void
object_cache_get_usage(object_cache* cache, size_t* _allocatedMemory)
{
	*_allocatedMemory = 0;
}
// The guarded heap needs none of the slab allocator's staged boot
// initialization; all four slab_init* hooks below are empty stubs.
void
slab_init(kernel_args* args)
{
}
// No-op: no post-area setup needed when the guarded heap replaces the slab.
void
slab_init_post_area()
{
}
// No-op: no post-semaphore setup needed in guarded heap mode.
void
slab_init_post_sem()
{
}
// No-op: no post-thread setup (e.g. maintenance thread) is started in
// guarded heap mode.
void
slab_init_post_thread()
{
}
#endif // USE_GUARDED_HEAP_FOR_OBJECT_CACHE
#endif // USE_GUARDED_HEAP_FOR_MALLOC

View File

@ -36,6 +36,9 @@
#include "SmallObjectCache.h"
#if !USE_GUARDED_HEAP_FOR_OBJECT_CACHE
typedef DoublyLinkedList<ObjectCache> ObjectCacheList;
typedef DoublyLinkedList<ObjectCache,
@ -831,6 +834,7 @@ add_alloc_tracing_entry(ObjectCache* cache, uint32 flags, void* object)
#endif
}
// #pragma mark -
@ -1421,3 +1425,6 @@ slab_init_post_thread()
RANGE_MARKER_FUNCTION_END(Slab)
#endif // !USE_GUARDED_HEAP_FOR_OBJECT_CACHE