add zero initialization on zero sized reallocation (issue #725)

daanx 2023-04-22 10:18:18 -07:00
parent e47adc2d22
commit e90ad987cc
1 changed file with 19 additions and 1 deletion

@@ -37,6 +37,11 @@ extern inline void* _mi_page_malloc(mi_heap_t* heap, mi_page_t* page, size_t siz
   page->used++;
   page->free = mi_block_next(page, block);
   mi_assert_internal(page->free == NULL || _mi_ptr_page(page->free) == page);
+  #if MI_DEBUG>3
+  if (page->free_is_zero) {
+    mi_assert_expensive(mi_mem_is_zero(block+1,size - sizeof(*block)));
+  }
+  #endif
 
   // allow use of the block internally
   // note: when tracking we need to avoid ever touching the MI_PADDING since
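
The check added to _mi_page_malloc is only compiled in for expensive-debug builds (MI_DEBUG defined greater than 3). It asserts that, whenever a page advertises free_is_zero, the block being handed out really is zero everywhere past its first word; the first word is skipped (block+1, size - sizeof(*block)) presumably because it holds the encoded free-list link, which is non-zero even on a zeroed page. As a rough sketch of what such a byte-wise zero scan amounts to (is_all_zero below is a hypothetical stand-in, not mimalloc's internal mi_mem_is_zero itself):

    #include <stddef.h>
    #include <stdbool.h>

    // Hypothetical stand-in for a zero check such as mimalloc's internal
    // mi_mem_is_zero: report whether every byte in [p, p+size) is zero.
    static bool is_all_zero(const void* p, size_t size) {
      const unsigned char* u = (const unsigned char*)p;
      for (size_t i = 0; i < size; i++) {
        if (u[i] != 0) return false;
      }
      return true;
    }
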
@@ -53,7 +58,7 @@ extern inline void* _mi_page_malloc(mi_heap_t* heap, mi_page_t* page, size_t siz
     }
     else {
       _mi_memzero_aligned(block, page->xblock_size - MI_PADDING_SIZE);
     }
   }
 
 #if (MI_DEBUG>0) && !MI_TRACK_ENABLED && !MI_TSAN
@@ -116,6 +121,11 @@ static inline mi_decl_restrict void* mi_heap_malloc_small_zero(mi_heap_t* heap,
     mi_heap_stat_increase(heap, malloc, mi_usable_size(p));
   }
   #endif
+  #if MI_DEBUG>3
+  if (p != NULL && zero) {
+    mi_assert_expensive(mi_mem_is_zero(p, size));
+  }
+  #endif
   return p;
 }
@@ -145,6 +155,11 @@ extern inline void* _mi_heap_malloc_zero_ex(mi_heap_t* heap, size_t size, bool z
       mi_heap_stat_increase(heap, malloc, mi_usable_size(p));
     }
     #endif
+    #if MI_DEBUG>3
+    if (p != NULL && zero) {
+      mi_assert_expensive(mi_mem_is_zero(p, size));
+    }
+    #endif
     return p;
   }
 }
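
The hunks in mi_heap_malloc_small_zero and _mi_heap_malloc_zero_ex add the same expensive assertion to both zero-initializing allocation paths: if zero initialization was requested and the allocation succeeded, all size bytes visible to the caller must read as zero. A small application-level test mirroring that invariant through the public API could look like the sketch below (mi_zalloc and mi_free are the regular mimalloc entry points; the in-allocator assertions themselves only fire when MI_DEBUG is defined greater than 3):

    #include <mimalloc.h>
    #include <assert.h>

    int main(void) {
      // mi_zalloc requests zero-initialized memory; the new MI_DEBUG>3
      // assertions verify the same property inside the allocator.
      const size_t size = 1000;
      unsigned char* p = (unsigned char*)mi_zalloc(size);
      assert(p != NULL);
      for (size_t i = 0; i < size; i++) {
        assert(p[i] == 0);
      }
      mi_free(p);
      return 0;
    }
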
@@ -702,6 +717,9 @@ void* _mi_heap_realloc_zero(mi_heap_t* heap, void* p, size_t newsize, bool zero)
       const size_t start = (size >= sizeof(intptr_t) ? size - sizeof(intptr_t) : 0);
       _mi_memzero((uint8_t*)newp + start, newsize - start);
     }
+    else if (newsize == 0) {
+      ((uint8_t*)newp)[0] = 0; // work around for applications that expect zero-reallocation to be zero initialized (issue #725)
+    }
     if mi_likely(p != NULL) {
       const size_t copysize = (newsize > size ? size : newsize);
       mi_track_mem_defined(p,copysize); // _mi_useable_size may be too large for byte precise memory tracking..
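
The last hunk, in _mi_heap_realloc_zero, is the actual fix for issue #725. Reallocating to size zero does not return NULL in mimalloc; it behaves like mi_malloc(0) and hands back a minimal block, and the first byte of that block is now cleared explicitly so applications that read it as a zero-initialized value keep working. A minimal sketch of the resulting behavior, assuming the public mi_malloc/mi_realloc/mi_free entry points (mi_realloc routes through _mi_heap_realloc_zero) and that the minimal block's usable size is at least one byte:

    #include <mimalloc.h>
    #include <assert.h>

    int main(void) {
      char* p = (char*)mi_malloc(16);
      assert(p != NULL);
      p[0] = 'x';                    // make the old contents non-zero
      p = (char*)mi_realloc(p, 0);   // shrink to a zero-sized block
      assert(p != NULL);             // a minimal block is returned, not NULL
      // After this commit the first byte of the zero-sized block reads as 0,
      // which is what the applications in issue #725 rely on.
      assert(p[0] == 0);
      mi_free(p);
      return 0;
    }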