diff --git a/Zend/zend_alloc.c b/Zend/zend_alloc.c index 5a1c19e09d19d..82ac6535e0b08 100644 --- a/Zend/zend_alloc.c +++ b/Zend/zend_alloc.c @@ -83,9 +83,6 @@ typedef int pid_t; #include #include #include -#ifdef __SANITIZE_ADDRESS__ -# include -#endif #ifndef _WIN32 # include @@ -173,6 +170,80 @@ static size_t _real_page_size = ZEND_MM_PAGE_SIZE; } while (0) #endif +#ifdef __SANITIZE_ADDRESS__ + +/* + Poisoning uses the following rules: + * Always poison memory (re)allocated by private (non-ZEND_API) allocation functions before returning it + * Unpoison memory (re)allocated by public (ZEND_API) allocation functions before returning within the ZEND_API function + * Always poison freed memory + * Always poison unused memory during reallocation (where new_size < old_size) + * When accessing private heap structures and fields, always unpoison before accessing and repoison immediately after + * An exception to the above (for simplicity) is the main heap datastructure, + which is poisoned only when entering a ZEND_API function and repoisoned before exiting. + * When working with custom handlers, remember that the tracked_malloc implementation + removes the custom handlers, re-enters the current Zend allocator function, + then re-sets the custom handlers, so when invoking custom handlers always + remember that the heap may be poisoned after invocation. 
+*/ + +# include + +# if 0 +# define ZEND_MM_POISON_DEBUG(_type, _ptr, _size) do { \ + fprintf(stderr, "%s %p - %p in %d\n", (_type), (_ptr), (void*) (((size_t)_ptr)+((size_t)_size)), __LINE__); \ + fflush(stderr); \ +} while (0); +# else +# define ZEND_MM_POISON_DEBUG(_type, _ptr, _size) +# endif + +# define ZEND_MM_POISON(_ptr, _size) do { \ + if (UNEXPECTED(((size_t) (_ptr)) & ((size_t)7))) { \ + zend_mm_panic("Wrong alignment"); \ + } \ + ZEND_MM_POISON_DEBUG("Poisoning", (_ptr), (_size)); \ + ASAN_POISON_MEMORY_REGION((_ptr), (_size));\ +} while (0); +# define ZEND_MM_UNPOISON(_ptr, _size) do { \ + if (UNEXPECTED(((size_t) (_ptr)) & ((size_t)7))) { \ + zend_mm_panic("Wrong alignment"); \ + } \ + ZEND_MM_POISON_DEBUG("Unpoisoning", (_ptr), (_size)); \ + ASAN_UNPOISON_MEMORY_REGION((_ptr), (_size));\ +} while (0); + +# define ZEND_MM_POISON_HEAP(_ptr) ZEND_MM_POISON((_ptr), sizeof(zend_mm_heap)); +# define ZEND_MM_UNPOISON_HEAP(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_heap)); + +# define ZEND_MM_POISON_DEBUGINFO(_ptr) ZEND_MM_POISON((_ptr), sizeof(zend_mm_debug_info)); +# define ZEND_MM_UNPOISON_DEBUGINFO(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_debug_info)); + +# define ZEND_MM_POISON_CHUNK_HDR(_ptr, _heap) do { \ + ZEND_MM_POISON((_ptr), sizeof(zend_mm_chunk)); \ + ZEND_MM_UNPOISON_HEAP((_heap)); \ +} while (0); + +# define ZEND_MM_UNPOISON_CHUNK_HDR(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_chunk)); + +# define ZEND_MM_POISON_CHUNK(_ptr, _heap) do { \ + ZEND_MM_POISON((_ptr), ZEND_MM_CHUNK_SIZE); \ + ZEND_MM_UNPOISON_HEAP((_heap)); \ +} while (0); + +#else + +# define ZEND_MM_POISON(_ptr, _size) +# define ZEND_MM_UNPOISON(_ptr, _size) +# define ZEND_MM_POISON_HEAP(_ptr) +# define ZEND_MM_UNPOISON_HEAP(_ptr) +# define ZEND_MM_POISON_CHUNK_HDR(_ptr, _heap) +# define ZEND_MM_UNPOISON_CHUNK_HDR(_ptr) +# define ZEND_MM_POISON_CHUNK(_ptr, _heap) +# define ZEND_MM_POISON_DEBUGINFO(_ptr) +# define ZEND_MM_UNPOISON_DEBUGINFO(_ptr) + +#endif typedef 
uint32_t zend_mm_page_info; /* 4-byte integer */ typedef zend_ulong zend_mm_bitset; /* 4-byte or 8-byte integer */ @@ -417,8 +488,8 @@ static ZEND_COLD ZEND_NORETURN void zend_mm_safe_error(zend_mm_heap *heap, #endif size_t size) { - heap->overflow = 1; + ZEND_MM_POISON_HEAP(heap); zend_try { zend_error_noreturn(E_ERROR, format, @@ -430,7 +501,9 @@ static ZEND_COLD ZEND_NORETURN void zend_mm_safe_error(zend_mm_heap *heap, size); } zend_catch { } zend_end_try(); + ZEND_MM_UNPOISON_HEAP(heap); heap->overflow = 0; + ZEND_MM_POISON_HEAP(heap); zend_bailout(); exit(1); } @@ -452,12 +525,77 @@ static void stderr_last_error(char *msg) } #endif +static void _zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, + void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + size_t (*_gc)(void), + void (*_shutdown)(bool, bool)) +{ +#if ZEND_MM_CUSTOM + zend_mm_heap *_heap = (zend_mm_heap*)heap; + + if (!_malloc && !_free && !_realloc) { + _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_NONE; + } else { + _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD; + _heap->custom_heap._malloc = _malloc; + _heap->custom_heap._free = _free; + _heap->custom_heap._realloc = _realloc; + _heap->custom_heap._gc = _gc; + _heap->custom_heap._shutdown = _shutdown; + } +#endif +} + +static void _zend_mm_get_custom_handlers_ex(zend_mm_heap *heap, + void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + size_t (**_gc)(void), + void (**_shutdown)(bool, bool)) +{ +#if ZEND_MM_CUSTOM + zend_mm_heap *_heap = (zend_mm_heap*)heap; + + if (heap->use_custom_heap) { + *_malloc = _heap->custom_heap._malloc; + *_free = _heap->custom_heap._free; + *_realloc = _heap->custom_heap._realloc; + if (_gc != NULL) { + 
*_gc = _heap->custom_heap._gc; + } + if (_shutdown != NULL) { + *_shutdown = _heap->custom_heap._shutdown; + } + } else { + *_malloc = NULL; + *_free = NULL; + *_realloc = NULL; + if (_gc != NULL) { + *_gc = NULL; + } + if (_shutdown != NULL) { + *_shutdown = NULL; + } + } +#else + *_malloc = NULL; + *_free = NULL; + *_realloc = NULL; + *_gc = NULL; + *_shutdown = NULL; +#endif +} + + /*****************/ /* OS Allocation */ /*****************/ static void zend_mm_munmap(void *addr, size_t size) { + ZEND_MM_UNPOISON(addr, size); #ifdef _WIN32 if (VirtualFree(addr, 0, MEM_RELEASE) == 0) { /** ERROR_INVALID_ADDRESS is expected when addr is not range start address */ @@ -764,9 +902,7 @@ static void *zend_mm_chunk_alloc_int(size_t size, size_t alignment) if (zend_mm_use_huge_pages) { zend_mm_hugepage(ptr, size); } -#ifdef __SANITIZE_ADDRESS__ - ASAN_UNPOISON_MEMORY_REGION(ptr, size); -#endif + ZEND_MM_POISON(ptr, size); return ptr; } else { size_t offset; @@ -806,9 +942,7 @@ static void *zend_mm_chunk_alloc_int(size_t size, size_t alignment) if (zend_mm_use_huge_pages) { zend_mm_hugepage(ptr, size); } -# ifdef __SANITIZE_ADDRESS__ - ASAN_UNPOISON_MEMORY_REGION(ptr, size); -# endif + ZEND_MM_POISON(ptr, size); #endif return ptr; } @@ -820,6 +954,7 @@ static void *zend_mm_chunk_alloc(zend_mm_heap *heap, size_t size, size_t alignme if (UNEXPECTED(heap->storage)) { void *ptr = heap->storage->handlers.chunk_alloc(heap->storage, size, alignment); ZEND_ASSERT(((uintptr_t)((char*)ptr + (alignment-1)) & (alignment-1)) == (uintptr_t)ptr); + ZEND_MM_POISON(ptr, size); return ptr; } #endif @@ -830,6 +965,7 @@ static void zend_mm_chunk_free(zend_mm_heap *heap, void *addr, size_t size) { #if ZEND_MM_STORAGE if (UNEXPECTED(heap->storage)) { + ZEND_MM_UNPOISON(addr, size); heap->storage->handlers.chunk_free(heap->storage, addr, size); return; } @@ -842,6 +978,7 @@ static int zend_mm_chunk_truncate(zend_mm_heap *heap, void *addr, size_t old_siz #if ZEND_MM_STORAGE if 
(UNEXPECTED(heap->storage)) { if (heap->storage->handlers.chunk_truncate) { + ZEND_MM_UNPOISON((char*)addr + new_size, old_size - new_size); return heap->storage->handlers.chunk_truncate(heap->storage, addr, old_size, new_size); } else { return 0; @@ -887,7 +1024,10 @@ static zend_always_inline void zend_mm_chunk_init(zend_mm_heap *heap, zend_mm_ch { chunk->heap = heap; chunk->next = heap->main_chunk; - chunk->prev = heap->main_chunk->prev; + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk); + zend_mm_chunk *prev = heap->main_chunk->prev; + ZEND_MM_UNPOISON_CHUNK_HDR(prev); + chunk->prev = prev; chunk->prev->next = chunk; chunk->next->prev = chunk; /* mark first pages as allocated */ @@ -898,6 +1038,8 @@ static zend_always_inline void zend_mm_chunk_init(zend_mm_heap *heap, zend_mm_ch /* mark first pages as allocated */ chunk->free_map[0] = (1L << ZEND_MM_FIRST_PAGE) - 1; chunk->map[0] = ZEND_MM_LRUN(ZEND_MM_FIRST_PAGE); + ZEND_MM_POISON_CHUNK_HDR(prev, heap); + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk, heap); } /***********************/ @@ -918,6 +1060,8 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t /* Large Runs */ /**************/ +static size_t _zend_mm_gc(zend_mm_heap *heap); + #if ZEND_DEBUG static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) #else @@ -925,6 +1069,7 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F #endif { zend_mm_chunk *chunk = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); uint32_t page_num, len; int steps = 0; @@ -1047,12 +1192,14 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F get_chunk: if (heap->cached_chunks) { heap->cached_chunks_count--; + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); chunk = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap->cached_chunks = chunk->next; } else { #if ZEND_MM_LIMIT if (UNEXPECTED(ZEND_MM_CHUNK_SIZE > 
heap->limit - heap->real_size)) { - if (zend_mm_gc(heap)) { + if (_zend_mm_gc(heap)) { goto get_chunk; } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1060,14 +1207,16 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F #else zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted (tried to allocate %zu bytes)", heap->limit, ZEND_MM_PAGE_SIZE * pages_count); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return NULL; } } #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); chunk = (zend_mm_chunk*)zend_mm_chunk_alloc(heap, ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(chunk == NULL)) { /* insufficient memory */ - if (zend_mm_gc(heap) && + if (_zend_mm_gc(heap) && (chunk = (zend_mm_chunk*)zend_mm_chunk_alloc(heap, ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE)) != NULL) { /* pass */ } else { @@ -1092,33 +1241,64 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F heap->real_size += ZEND_MM_CHUNK_SIZE; #endif + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } heap->chunks_count++; if (heap->chunks_count > heap->peak_chunks_count) { heap->peak_chunks_count = heap->chunks_count; } zend_mm_chunk_init(heap, chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); page_num = ZEND_MM_FIRST_PAGE; len = ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE; goto found; } else { - chunk = chunk->next; + zend_mm_chunk *next_chunk = chunk->next; + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + chunk = next_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); steps++; } } found: if (steps > 2 && pages_count < 8) { +#ifdef __SANITIZE_ADDRESS__ + /* ifdef needed to avoid unused var warnings for the following vars */ + zend_mm_chunk *prev_chunk = chunk->prev; + zend_mm_chunk *next_chunk = chunk->next; + ZEND_MM_UNPOISON_CHUNK_HDR(next_chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(prev_chunk); +#endif + ZEND_MM_CHECK(chunk->next->prev == chunk, "zend_mm_heap corrupted"); ZEND_MM_CHECK(chunk->prev->next == chunk, "zend_mm_heap corrupted"); /* move chunk into the head of 
the linked-list */ chunk->prev->next = chunk->next; chunk->next->prev = chunk->prev; + + +#ifdef __SANITIZE_ADDRESS__ + zend_mm_chunk *main_chunk = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(main_chunk); + zend_mm_chunk *main_next_chunk = heap->main_chunk->next; + ZEND_MM_UNPOISON_CHUNK_HDR(main_next_chunk); +#endif chunk->next = heap->main_chunk->next; chunk->prev = heap->main_chunk; chunk->prev->next = chunk; chunk->next->prev = chunk; +#ifdef __SANITIZE_ADDRESS__ + ZEND_MM_POISON_CHUNK_HDR(main_chunk, heap); + ZEND_MM_POISON_CHUNK_HDR(main_next_chunk, heap); +#endif + +#ifdef __SANITIZE_ADDRESS__ + ZEND_MM_POISON_CHUNK_HDR(next_chunk, heap); + ZEND_MM_POISON_CHUNK_HDR(prev_chunk, heap); +#endif + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } /* mark run as allocated */ chunk->free_pages -= pages_count; @@ -1127,6 +1307,7 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F if (page_num == chunk->free_tail) { chunk->free_tail = page_num + pages_count; } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return ZEND_MM_PAGE_ADDR(chunk, page_num); } @@ -1156,11 +1337,19 @@ static zend_never_inline void *zend_mm_alloc_large(zend_mm_heap *heap, size_t si static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_chunk *chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->next); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->prev); + ZEND_MM_CHECK(chunk->next->prev == chunk, "zend_mm_heap corrupted"); ZEND_MM_CHECK(chunk->prev->next == chunk, "zend_mm_heap corrupted"); chunk->next->prev = chunk->prev; chunk->prev->next = chunk->next; + + ZEND_MM_POISON_CHUNK_HDR(chunk->next, heap); + ZEND_MM_POISON_CHUNK_HDR(chunk->prev, heap); + heap->chunks_count--; if (heap->chunks_count + heap->cached_chunks_count < heap->avg_chunks_count + 0.1 || (heap->chunks_count == heap->last_chunks_delete_boundary @@ -1169,6 +1358,7 @@ static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_ 
heap->cached_chunks_count++; chunk->next = heap->cached_chunks; heap->cached_chunks = chunk; + ZEND_MM_POISON_CHUNK(chunk, heap); } else { #if ZEND_MM_STAT || ZEND_MM_LIMIT heap->real_size -= ZEND_MM_CHUNK_SIZE; @@ -1181,19 +1371,31 @@ static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_ heap->last_chunks_delete_count++; } } + + if (heap->cached_chunks) { + ZEND_MM_UNPOISON_CHUNK_HDR(heap->cached_chunks); + } + if (!heap->cached_chunks || chunk->num > heap->cached_chunks->num) { + if (heap->cached_chunks) { + ZEND_MM_POISON_CHUNK_HDR(heap->cached_chunks, heap); + } zend_mm_chunk_free(heap, chunk, ZEND_MM_CHUNK_SIZE); } else { //TODO: select the best chunk to delete??? + + ZEND_MM_UNPOISON_CHUNK_HDR(heap->cached_chunks); chunk->next = heap->cached_chunks->next; zend_mm_chunk_free(heap, heap->cached_chunks, ZEND_MM_CHUNK_SIZE); heap->cached_chunks = chunk; + ZEND_MM_POISON_CHUNK(chunk, heap); } } } static zend_always_inline void zend_mm_free_pages_ex(zend_mm_heap *heap, zend_mm_chunk *chunk, uint32_t page_num, uint32_t pages_count, int free_chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); chunk->free_pages += pages_count; zend_mm_bitset_reset_range(chunk->free_map, page_num, pages_count); chunk->map[page_num] = 0; @@ -1201,8 +1403,13 @@ static zend_always_inline void zend_mm_free_pages_ex(zend_mm_heap *heap, zend_mm /* this setting may be not accurate */ chunk->free_tail = page_num; } - if (free_chunk && chunk != heap->main_chunk && chunk->free_pages == ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE) { + bool should_free = free_chunk && chunk != heap->main_chunk && chunk->free_pages == ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE; + ZEND_MM_POISON(ZEND_MM_PAGE_ADDR(chunk, page_num), pages_count * ZEND_MM_PAGE_SIZE); + + if (should_free) { zend_mm_delete_chunk(heap, chunk); + } else { + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); } } @@ -1323,27 +1530,46 @@ static zend_always_inline void zend_mm_set_next_free_slot(zend_mm_heap *heap, ui { 
ZEND_ASSERT(bin_data_size[bin_num] >= ZEND_MM_MIN_USEABLE_BIN_SIZE); + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_UNPOISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); + slot->next_free_slot = next; ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num) = zend_mm_encode_free_slot(heap, next); + + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_POISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); } static zend_always_inline zend_mm_free_slot *zend_mm_get_next_free_slot(zend_mm_heap *heap, uint32_t bin_num, zend_mm_free_slot* slot) { + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_UNPOISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); + zend_mm_free_slot *next = slot->next_free_slot; - if (EXPECTED(next != NULL)) { - zend_mm_free_slot *shadow = ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num); - if (UNEXPECTED(next != zend_mm_decode_free_slot(heap, shadow))) { - zend_mm_panic("zend_mm_heap corrupted"); - } - } - return (zend_mm_free_slot*)next; + zend_mm_free_slot *shadow = ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num); + ZEND_MM_CHECK(next == zend_mm_decode_free_slot(heap, shadow), "zend_mm_heap corrupted"); + + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_POISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); + + return next; } #else /* ZEND_MM_HEAP_PROTECTION */ # define zend_mm_set_next_free_slot(heap, bin_num, slot, next) do { \ - (slot)->next_free_slot = (next); \ + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); \ + (slot)->next_free_slot = (next); \ + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); \ } while (0) -# define zend_mm_get_next_free_slot(heap, bin_num, slot) (slot)->next_free_slot + +static zend_always_inline zend_mm_free_slot *zend_mm_get_next_free_slot(zend_mm_heap *heap, uint32_t bin_num, zend_mm_free_slot* slot) +{ + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); + 
zend_mm_free_slot *next = slot->next_free_slot; + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); + + return next; +} #endif /* ZEND_MM_HEAP_PROTECTION */ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint32_t bin_num ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -1362,8 +1588,12 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint /* insufficient memory */ return NULL; } + /* bin is poisoned, do not unpoison inside this function */ + /* zend_mm_set_next_free_slot automatically unpoisons and repoisons before return */ chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(bin, ZEND_MM_CHUNK_SIZE); + + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); page_num = ZEND_MM_ALIGNED_OFFSET(bin, ZEND_MM_CHUNK_SIZE) / ZEND_MM_PAGE_SIZE; chunk->map[page_num] = ZEND_MM_SRUN(bin_num); if (bin_pages[bin_num] > 1) { @@ -1374,6 +1604,7 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint i++; } while (i < bin_pages[bin_num]); } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); /* create a linked list of elements from 1 to last */ end = (zend_mm_free_slot*)((char*)bin + (bin_data_size[bin_num] * (bin_elements[bin_num] - 1))); @@ -1383,18 +1614,23 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while (0); #endif + p = (zend_mm_free_slot*)((char*)p + bin_data_size[bin_num]); } while (p != end); - /* terminate list using NULL */ - p->next_free_slot = NULL; + zend_mm_set_next_free_slot(heap, bin_num, p, NULL); + #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while 
(0); #endif @@ -1418,6 +1654,7 @@ static zend_always_inline void *zend_mm_alloc_small(zend_mm_heap *heap, int bin_ if (EXPECTED(heap->free_slot[bin_num] != NULL)) { zend_mm_free_slot *p = heap->free_slot[bin_num]; heap->free_slot[bin_num] = zend_mm_get_next_free_slot(heap, bin_num, p); + return p; } else { return zend_mm_alloc_small_slow(heap, bin_num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); @@ -1437,13 +1674,17 @@ static zend_always_inline void zend_mm_free_small(zend_mm_heap *heap, void *ptr, #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)ptr + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while (0); #endif p = (zend_mm_free_slot*)ptr; zend_mm_set_next_free_slot(heap, bin_num, p, heap->free_slot[bin_num]); heap->free_slot[bin_num] = p; + + ZEND_MM_POISON(p, bin_data_size[bin_num]); } /********/ @@ -1461,8 +1702,10 @@ static zend_always_inline zend_mm_debug_info *zend_mm_get_debug_info(zend_mm_hea ZEND_MM_CHECK(page_offset != 0, "zend_mm_heap corrupted"); chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { int bin_num = ZEND_MM_SRUN_BIN_NUM(info); return (zend_mm_debug_info*)((char*)ptr + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); @@ -1498,22 +1741,26 @@ static zend_always_inline void *zend_mm_alloc_heap(zend_mm_heap *heap, size_t si ptr = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; 
dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif return ptr; } else if (EXPECTED(size <= ZEND_MM_MAX_LARGE_SIZE)) { ptr = zend_mm_alloc_large(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif return ptr; } else { @@ -1535,9 +1782,11 @@ static zend_always_inline void zend_mm_free_heap(zend_mm_heap *heap, void *ptr Z } else { zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); int page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); zend_mm_page_info info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { zend_mm_free_small(heap, ptr, ZEND_MM_SRUN_BIN_NUM(info)); } else /* if (info & ZEND_MM_IS_LRUN) */ { @@ -1559,15 +1808,20 @@ static size_t zend_mm_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_ zend_mm_chunk *chunk; #if 0 && ZEND_DEBUG zend_mm_debug_info *dbg = zend_mm_get_debug_info(heap, ptr); - return dbg->size; + ZEND_MM_UNPOISON_DEBUGINFO(dbg); + size_t ret = dbg->size; + ZEND_MM_POISON_DEBUGINFO(dbg); + return ret; #else int page_num; zend_mm_page_info info; chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { return bin_data_size[ZEND_MM_SRUN_BIN_NUM(info)]; } else /* if (info & 
ZEND_MM_IS_LARGE_RUN) */ { @@ -1586,6 +1840,8 @@ static zend_never_inline void *zend_mm_realloc_slow(zend_mm_heap *heap, void *pt size_t orig_peak = heap->peak; #endif ret = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_MM_STAT @@ -1627,6 +1883,7 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, new_size); return ptr; } else if (new_size < old_size) { /* unmup tail */ @@ -1642,12 +1899,13 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, old_size); return ptr; } } else /* if (new_size > old_size) */ { #if ZEND_MM_LIMIT if (UNEXPECTED(new_size - old_size > heap->limit - heap->real_size)) { - if (zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) { + if (_zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) { /* pass */ } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1674,6 +1932,8 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, new_size); + return ptr; } } @@ -1695,12 +1955,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(page_offset == 0)) { if (EXPECTED(ptr == NULL)) { - return _zend_mm_alloc(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret 
= _zend_mm_alloc(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } else { - return zend_mm_realloc_huge(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret = zend_mm_realloc_huge(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } + ZEND_MM_UNPOISON(ret, size); + return ret; } else { zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + int page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); zend_mm_page_info info = chunk->map[page_num]; #if ZEND_MM_HEAP_PROTECTION @@ -1728,11 +1992,15 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p /* truncation */ ret = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); copy_size = use_copy_size ? MIN(size, copy_size) : size; + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_small(heap, ptr, old_bin_num); } else { /* reallocation in-place */ ret = ptr; + ZEND_MM_POISON(ret, old_size) + ZEND_MM_UNPOISON(ret, size); } } else if (size <= ZEND_MM_MAX_SMALL_SIZE) { /* small extension */ @@ -1743,6 +2011,8 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #endif ret = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); copy_size = use_copy_size ? 
MIN(old_size, copy_size) : old_size; + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_small(heap, ptr, old_bin_num); #if ZEND_MM_STAT @@ -1756,12 +2026,15 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ret); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return ret; } while (0); @@ -1773,12 +2046,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p if (new_size == old_size) { #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_UNPOISON(ptr, size); return ptr; } else if (new_size < old_size) { /* free tail pages */ @@ -1791,14 +2068,20 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p chunk->map[page_num] = ZEND_MM_LRUN(new_pages_count); chunk->free_pages += rest_pages_count; zend_mm_bitset_reset_range(chunk->free_map, page_num + new_pages_count, rest_pages_count); + ZEND_MM_POISON(ZEND_MM_PAGE_ADDR(chunk, page_num + new_pages_count), rest_pages_count * ZEND_MM_PAGE_SIZE); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_POISON(ptr, 
old_size); + ZEND_MM_UNPOISON(ptr, size); return ptr; } else /* if (new_size > old_size) */ { int new_pages_count = (int)(new_size / ZEND_MM_PAGE_SIZE); @@ -1820,12 +2103,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p chunk->map[page_num] = ZEND_MM_LRUN(new_pages_count); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_UNPOISON(ptr, size); return ptr; } } @@ -1834,10 +2121,13 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #if ZEND_DEBUG size = real_size; #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); } copy_size = MIN(old_size, copy_size); - return zend_mm_realloc_slow(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret = zend_mm_realloc_slow(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(ret, size); + return ret; } /*********************/ @@ -1851,6 +2141,7 @@ static void zend_mm_add_huge_block(zend_mm_heap *heap, void *ptr, size_t size ZE #endif { zend_mm_huge_list *list = (zend_mm_huge_list*)zend_mm_alloc_heap(heap, sizeof(zend_mm_huge_list) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); list->ptr = ptr; list->size = size; list->next = heap->huge_list; @@ -1861,6 +2152,7 @@ static void zend_mm_add_huge_block(zend_mm_heap *heap, void *ptr, size_t size ZE list->dbg.lineno = __zend_lineno; list->dbg.orig_lineno = __zend_orig_lineno; #endif + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); heap->huge_list = list; } @@ -1869,11 +2161,14 @@ static size_t zend_mm_del_huge_block(zend_mm_heap *heap, void *ptr ZEND_FILE_LIN zend_mm_huge_list *prev = 
NULL; zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { size_t size; if (prev) { + ZEND_MM_UNPOISON(prev, sizeof(zend_mm_huge_list)); prev->next = list->next; + ZEND_MM_POISON(prev, sizeof(zend_mm_huge_list)); } else { heap->huge_list = list->next; } @@ -1883,6 +2178,7 @@ static size_t zend_mm_del_huge_block(zend_mm_heap *heap, void *ptr ZEND_FILE_LIN } prev = list; list = list->next; + ZEND_MM_POISON(prev, sizeof(zend_mm_huge_list)); } ZEND_MM_CHECK(0, "zend_mm_heap corrupted"); return 0; @@ -1892,10 +2188,15 @@ static size_t zend_mm_get_huge_block_size(zend_mm_heap *heap, void *ptr ZEND_FIL { zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { - return list->size; + size_t size = list->size; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + return size; } - list = list->next; + zend_mm_huge_list *next = list->next; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + list = next; } ZEND_MM_CHECK(0, "zend_mm_heap corrupted"); return 0; @@ -1909,6 +2210,7 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t { zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { list->size = size; #if ZEND_DEBUG @@ -1918,9 +2220,12 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t list->dbg.lineno = __zend_lineno; list->dbg.orig_lineno = __zend_orig_lineno; #endif + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); return; } - list = list->next; + zend_mm_huge_list *next = list->next; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + list = next; } } @@ -1944,7 +2249,7 @@ static void *zend_mm_alloc_huge(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_D #if ZEND_MM_LIMIT if (UNEXPECTED(new_size > heap->limit - heap->real_size)) { - if 
(zend_mm_gc(heap) && new_size <= heap->limit - heap->real_size) { + if (_zend_mm_gc(heap) && new_size <= heap->limit - heap->real_size) { /* pass */ } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1959,7 +2264,7 @@ static void *zend_mm_alloc_huge(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_D ptr = zend_mm_chunk_alloc(heap, new_size, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(ptr == NULL)) { /* insufficient memory */ - if (zend_mm_gc(heap) && + if (_zend_mm_gc(heap) && (ptr = zend_mm_chunk_alloc(heap, new_size, ZEND_MM_CHUNK_SIZE)) != NULL) { /* pass */ } else { @@ -2030,6 +2335,7 @@ static void zend_mm_init_key(zend_mm_heap *heap) static zend_mm_heap *zend_mm_init(void) { zend_mm_chunk *chunk = (zend_mm_chunk*)zend_mm_chunk_alloc_int(ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE); + zend_mm_heap *heap; if (UNEXPECTED(chunk == NULL)) { @@ -2038,6 +2344,7 @@ static zend_mm_heap *zend_mm_init(void) #endif return NULL; } + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap = &chunk->heap_slot; chunk->heap = heap; chunk->next = chunk; @@ -2076,10 +2383,12 @@ static zend_mm_heap *zend_mm_init(void) #endif heap->huge_list = NULL; heap->pid = getpid(); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return heap; } -ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) +// heap must be unpoisoned when entering, must remain unpoisoned before leaving +static size_t _zend_mm_gc(zend_mm_heap *heap) { zend_mm_free_slot *p, *q; zend_mm_chunk *chunk; @@ -2094,7 +2403,13 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) if (heap->use_custom_heap) { size_t (*gc)(void) = heap->custom_heap._gc; if (gc) { - return gc(); + size_t ret = gc(); + // Unpoison to handle re-entrant calls + // i.e. via the tracked allocator, which + // re-invokes the ZEND_API zend_mm_gc which + // poisons the heap before exiting. 
+ ZEND_MM_UNPOISON_HEAP(heap); + return ret; } return 0; } @@ -2105,6 +2420,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) p = heap->free_slot[i]; while (p != NULL) { chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(p, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); page_offset = ZEND_MM_ALIGNED_OFFSET(p, ZEND_MM_CHUNK_SIZE); ZEND_ASSERT(page_offset != 0); @@ -2123,6 +2440,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) has_free_pages = true; } chunk->map[page_num] = ZEND_MM_SRUN_EX(i, free_counter); + + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); p = zend_mm_get_next_free_slot(heap, i, p); } @@ -2134,6 +2453,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) p = q->next_free_slot; while (p != NULL) { chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(p, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); page_offset = ZEND_MM_ALIGNED_OFFSET(p, ZEND_MM_CHUNK_SIZE); ZEND_ASSERT(page_offset != 0); @@ -2146,6 +2467,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) ZEND_ASSERT(info & ZEND_MM_IS_SRUN); ZEND_ASSERT(!(info & ZEND_MM_IS_LRUN)); } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_ASSERT(ZEND_MM_SRUN_BIN_NUM(info) == i); if (ZEND_MM_SRUN_FREE_COUNTER(info) == bin_elements[i]) { /* remove from cache */ @@ -2167,6 +2490,7 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) } chunk = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); do { i = ZEND_MM_FIRST_PAGE; while (i < chunk->free_tail) { @@ -2179,6 +2503,7 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) if (ZEND_MM_SRUN_FREE_COUNTER(info) == bin_elements[bin_num]) { /* all elements are free */ zend_mm_free_pages_ex(heap, chunk, i, pages_count, 0); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); collected += pages_count; } else { /* reset counter */ @@ -2197,14 +2522,27 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) zend_mm_delete_chunk(heap, chunk); chunk = next_chunk; + 
ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } else { - chunk = chunk->next; + zend_mm_chunk *next_chunk = chunk->next; + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + chunk = next_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } } while (chunk != heap->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return collected * ZEND_MM_PAGE_SIZE; } +ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) +{ + ZEND_MM_UNPOISON_HEAP(heap); + size_t ret = _zend_mm_gc(heap); + ZEND_MM_POISON_HEAP(heap); + return ret; +} + #if ZEND_DEBUG /******************/ /* Leak detection */ @@ -2218,6 +2556,7 @@ static zend_long zend_mm_find_leaks_small(zend_mm_chunk *p, uint32_t i, uint32_t zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * i + bin_data_size[bin_num] * (j + 1) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); while (j < bin_elements[bin_num]) { + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->size != 0) { if (dbg->filename == leak->filename && dbg->lineno == leak->lineno) { count++; @@ -2229,8 +2568,10 @@ static zend_long zend_mm_find_leaks_small(zend_mm_chunk *p, uint32_t i, uint32_t } } j++; + ZEND_MM_POISON_DEBUGINFO(dbg); dbg = (zend_mm_debug_info*)((char*)dbg + bin_data_size[bin_num]); } + if (empty) { zend_mm_bitset_reset_range(p->free_map, i, bin_pages[bin_num]); } @@ -2251,10 +2592,13 @@ static zend_long zend_mm_find_leaks(zend_mm_heap *heap, zend_mm_chunk *p, uint32 } else /* if (p->map[i] & ZEND_MM_IS_LRUN) */ { int pages_count = ZEND_MM_LRUN_PAGES(p->map[i]); zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * (i + pages_count) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); - + + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->filename == leak->filename && dbg->lineno == leak->lineno) { count++; } + ZEND_MM_POISON_DEBUGINFO(dbg); + zend_mm_bitset_reset_range(p->free_map, i, pages_count); i += pages_count; } @@ -2262,9 +2606,14 @@ static zend_long zend_mm_find_leaks(zend_mm_heap *heap, zend_mm_chunk *p, uint32 i++; } } - p = 
p->next; + zend_mm_chunk *next = p->next; + ZEND_MM_POISON_CHUNK_HDR(p, heap); + p = next; + ZEND_MM_UNPOISON_CHUNK_HDR(p); + i = ZEND_MM_FIRST_PAGE; } while (p != heap->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(p, heap); return count; } @@ -2302,6 +2651,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) list = heap->huge_list; while (list) { zend_mm_huge_list *q = list; + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); leak.addr = list->ptr; leak.size = list->dbg.size; @@ -2325,6 +2675,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) /* for each chunk */ p = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(p); do { i = ZEND_MM_FIRST_PAGE; while (i < p->free_tail) { @@ -2335,6 +2686,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) j = 0; while (j < bin_elements[bin_num]) { + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->size != 0) { leak.addr = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * i + bin_data_size[bin_num] * j); leak.size = dbg->size; @@ -2350,13 +2702,18 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) dbg->filename = NULL; dbg->lineno = 0; - repeated = zend_mm_find_leaks_small(p, i, j + 1, &leak) + - zend_mm_find_leaks(heap, p, i + bin_pages[bin_num], &leak); + repeated = zend_mm_find_leaks_small(p, i, j + 1, &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); + + repeated += zend_mm_find_leaks(heap, p, i + bin_pages[bin_num], &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); + total += 1 + repeated; if (repeated) { zend_message_dispatcher(ZMSG_MEMORY_LEAK_REPEATED, (void *)(uintptr_t)repeated); } } + ZEND_MM_POISON_DEBUGINFO(dbg); dbg = (zend_mm_debug_info*)((char*)dbg + bin_data_size[bin_num]); j++; } @@ -2364,6 +2721,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) } else /* if (p->map[i] & ZEND_MM_IS_LRUN) */ { int pages_count = ZEND_MM_LRUN_PAGES(p->map[i]); zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * (i + pages_count) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + 
ZEND_MM_UNPOISON_DEBUGINFO(dbg); leak.addr = (void*)((char*)p + ZEND_MM_PAGE_SIZE * i); leak.size = dbg->size; @@ -2371,6 +2729,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) leak.orig_filename = dbg->orig_filename; leak.lineno = dbg->lineno; leak.orig_lineno = dbg->orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); zend_message_dispatcher(ZMSG_LOG_SCRIPT_NAME, NULL); zend_message_dispatcher(ZMSG_MEMORY_LEAK_DETECTED, &leak); @@ -2378,6 +2737,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) zend_mm_bitset_reset_range(p->free_map, i, pages_count); repeated = zend_mm_find_leaks(heap, p, i + pages_count, &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); total += 1 + repeated; if (repeated) { zend_message_dispatcher(ZMSG_MEMORY_LEAK_REPEATED, (void *)(uintptr_t)repeated); @@ -2388,7 +2748,10 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) i++; } } - p = p->next; + zend_mm_chunk *next = p->next; + ZEND_MM_POISON_CHUNK_HDR(p, heap); + p = next; + ZEND_MM_UNPOISON_CHUNK_HDR(p); } while (p != heap->main_chunk); if (total) { zend_message_dispatcher(ZMSG_MEMORY_LEAKS_GRAND_TOTAL, &total); @@ -2416,6 +2779,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) { zend_mm_chunk *p; zend_mm_huge_list *list; + ZEND_MM_UNPOISON_HEAP(heap); #if ZEND_MM_CUSTOM if (heap->use_custom_heap) { @@ -2444,6 +2808,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) if (shutdown) { shutdown(full, silent); } + ZEND_MM_POISON_HEAP(heap); return; } @@ -2463,25 +2828,33 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) heap->huge_list = NULL; while (list) { zend_mm_huge_list *q = list; + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); list = list->next; zend_mm_chunk_free(heap, q->ptr, q->size); } /* move all chunks except of the first one into the cache */ + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk); p = heap->main_chunk->next; while (p != heap->main_chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(p); 
zend_mm_chunk *q = p->next; p->next = heap->cached_chunks; heap->cached_chunks = p; + ZEND_MM_POISON_CHUNK(p, heap); p = q; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->chunks_count--; heap->cached_chunks_count++; } + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk, heap); + if (full) { /* free all cached chunks */ while (heap->cached_chunks) { p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); } @@ -2493,6 +2866,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) while ((double)heap->cached_chunks_count + 0.9 > heap->avg_chunks_count && heap->cached_chunks) { p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); heap->cached_chunks_count--; @@ -2500,14 +2874,17 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) /* clear cached chunks */ p = heap->cached_chunks; while (p != NULL) { + ZEND_MM_UNPOISON_CHUNK_HDR(p); zend_mm_chunk *q = p->next; memset(p, 0, sizeof(zend_mm_chunk)); p->next = q; + ZEND_MM_POISON_CHUNK_HDR(p, heap); p = q; } /* reinitialize the first chunk and heap */ p = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(p); p->heap = &p->heap_slot; p->next = p; p->prev = p; @@ -2541,7 +2918,9 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) } else { zend_mm_refresh_key(heap); } + ZEND_MM_POISON_CHUNK_HDR(p, heap); } + ZEND_MM_POISON_HEAP(heap); } /**************/ @@ -2550,12 +2929,20 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) ZEND_API void* ZEND_FASTCALL _zend_mm_alloc(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { - return zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(heap); + void *ptr = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + if (EXPECTED(ptr)) 
{ + ZEND_MM_UNPOISON(ptr, size); + } + ZEND_MM_POISON_HEAP(heap); + return ptr; } ZEND_API void ZEND_FASTCALL _zend_mm_free(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(heap); zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); } void* ZEND_FASTCALL _zend_mm_realloc(zend_mm_heap *heap, void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -2570,20 +2957,27 @@ void* ZEND_FASTCALL _zend_mm_realloc2(zend_mm_heap *heap, void *ptr, size_t size ZEND_API size_t ZEND_FASTCALL _zend_mm_block_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(heap); #if ZEND_MM_CUSTOM if (UNEXPECTED(heap->use_custom_heap)) { if (heap->custom_heap._malloc == tracked_malloc) { zend_ulong h = ((uintptr_t) ptr) >> ZEND_MM_ALIGNMENT_LOG2; zval *size_zv = zend_hash_index_find(heap->tracked_allocs, h); if (size_zv) { + ZEND_MM_POISON_HEAP(heap); return Z_LVAL_P(size_zv); } } else if (heap->custom_heap._malloc != poison_malloc) { + ZEND_MM_POISON_HEAP(heap); return 0; } + ZEND_MM_POISON_HEAP(heap); + return 0; } #endif - return zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + size_t ret = zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); + return ret; } /**********************/ @@ -2606,7 +3000,10 @@ static zend_alloc_globals alloc_globals; ZEND_API bool is_zend_mm(void) { #if ZEND_MM_CUSTOM - return !AG(mm_heap)->use_custom_heap; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = !AG(mm_heap)->use_custom_heap; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return 1; #endif @@ -2614,40 +3011,57 @@ ZEND_API bool is_zend_mm(void) ZEND_API bool is_zend_ptr(const void *ptr) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (AG(mm_heap)->use_custom_heap) { if (AG(mm_heap)->custom_heap._malloc == tracked_malloc) { 
zend_ulong h = ((uintptr_t) ptr) >> ZEND_MM_ALIGNMENT_LOG2; zval *size_zv = zend_hash_index_find(AG(mm_heap)->tracked_allocs, h); if (size_zv) { + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 0; } #endif if (AG(mm_heap)->main_chunk) { zend_mm_chunk *chunk = AG(mm_heap)->main_chunk; + zend_mm_chunk *next; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); do { if (ptr >= (void*)chunk && ptr < (void*)((char*)chunk + ZEND_MM_CHUNK_SIZE)) { + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } - chunk = chunk->next; + next = chunk->next; + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + chunk = next; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } while (chunk != AG(mm_heap)->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); } zend_mm_huge_list *block = AG(mm_heap)->huge_list; while (block) { + ZEND_MM_UNPOISON(block, sizeof(zend_mm_huge_list)); if (ptr >= block->ptr && ptr < (void*)((char*)block->ptr + block->size)) { + ZEND_MM_POISON(block, sizeof(zend_mm_huge_list)); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } - block = block->next; + zend_mm_huge_list *next = block->next; + ZEND_MM_POISON(block, sizeof(zend_mm_huge_list)); + block = next; } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 0; } @@ -2657,12 +3071,15 @@ ZEND_API bool is_zend_ptr(const void *ptr) #if ZEND_MM_CUSTOM # define ZEND_MM_CUSTOM_ALLOCATOR(size) do { \ if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { \ - return AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ret = AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + return ret; \ } \ } while (0) # define ZEND_MM_CUSTOM_DEALLOCATOR(ptr) do { \ if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { \ AG(mm_heap)->custom_heap._free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ } 
while (0) @@ -2673,57 +3090,81 @@ ZEND_API bool is_zend_ptr(const void *ptr) # define _ZEND_BIN_ALLOCATOR(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void* ZEND_FASTCALL _emalloc_ ## _size(void) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_ALLOCATOR(_size); \ if (_size < _min_size) { \ - return _emalloc_ ## _min_size(); \ + void *ret = _emalloc_ ## _min_size(); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + return ret; \ } \ - return zend_mm_alloc_small(AG(mm_heap), _num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ptr = zend_mm_alloc_small(AG(mm_heap), _num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + ZEND_MM_UNPOISON(ptr, _size); \ + return ptr; \ } ZEND_MM_BINS_INFO(_ZEND_BIN_ALLOCATOR, ZEND_MM_MIN_USEABLE_BIN_SIZE, y) ZEND_API void* ZEND_FASTCALL _emalloc_large(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_ALLOCATOR(size); - return zend_mm_alloc_large_ex(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_large_ex(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + ZEND_MM_UNPOISON(ptr, size); + return ptr; } ZEND_API void* ZEND_FASTCALL _emalloc_huge(size_t size) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_ALLOCATOR(size); - return zend_mm_alloc_huge(AG(mm_heap), size); + void *ptr = zend_mm_alloc_huge(AG(mm_heap), size); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + ZEND_MM_UNPOISON(ptr, size); + return ptr; } #if ZEND_DEBUG # define _ZEND_BIN_FREE(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void ZEND_FASTCALL _efree_ ## _size(void *ptr) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_DEALLOCATOR(ptr); \ if (_size < _min_size) { \ _efree_ ## _min_size(ptr); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ { \ size_t page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, 
ZEND_MM_CHUNK_SIZE); \ zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); \ int page_num = page_offset / ZEND_MM_PAGE_SIZE; \ + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); \ ZEND_MM_CHECK(chunk->heap == AG(mm_heap), "zend_mm_heap corrupted"); \ ZEND_ASSERT(chunk->map[page_num] & ZEND_MM_IS_SRUN); \ ZEND_ASSERT(ZEND_MM_SRUN_BIN_NUM(chunk->map[page_num]) == _num); \ + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); \ zend_mm_free_small(AG(mm_heap), ptr, _num); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ } \ } #else # define _ZEND_BIN_FREE(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void ZEND_FASTCALL _efree_ ## _size(void *ptr) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_DEALLOCATOR(ptr); \ if (_size < _min_size) { \ _efree_ ## _min_size(ptr); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ { \ zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); \ + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); \ ZEND_MM_CHECK(chunk->heap == AG(mm_heap), "zend_mm_heap corrupted"); \ + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); \ zend_mm_free_small(AG(mm_heap), ptr, _num); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ } \ } #endif @@ -2732,72 +3173,103 @@ ZEND_MM_BINS_INFO(_ZEND_BIN_FREE, ZEND_MM_MIN_USEABLE_BIN_SIZE, y) ZEND_API void ZEND_FASTCALL _efree_large(void *ptr, size_t size) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ZEND_MM_CUSTOM_DEALLOCATOR(ptr); { size_t page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE); zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); int page_num = page_offset / ZEND_MM_PAGE_SIZE; uint32_t pages_count = ZEND_MM_ALIGNED_SIZE_EX(size, ZEND_MM_PAGE_SIZE) / ZEND_MM_PAGE_SIZE; - + + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); ZEND_MM_CHECK(chunk->heap == AG(mm_heap) && ZEND_MM_ALIGNED_OFFSET(page_offset, ZEND_MM_PAGE_SIZE) == 0, "zend_mm_heap corrupted"); ZEND_ASSERT(chunk->map[page_num] & ZEND_MM_IS_LRUN); 
ZEND_ASSERT(ZEND_MM_LRUN_PAGES(chunk->map[page_num]) == pages_count); + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + zend_mm_free_large(AG(mm_heap), chunk, page_num, pages_count); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } } ZEND_API void ZEND_FASTCALL _efree_huge(void *ptr, size_t size) { - + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_DEALLOCATOR(ptr); zend_mm_free_huge(AG(mm_heap), ptr); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } #endif ZEND_API void* ZEND_FASTCALL _emalloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ptr = AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_alloc_heap(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_heap(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + if (EXPECTED(ptr)) { + ZEND_MM_UNPOISON(ptr, size); + } + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API void ZEND_FASTCALL _efree(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { AG(mm_heap)->custom_heap._free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return; } #endif zend_mm_free_heap(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } ZEND_API void* ZEND_FASTCALL _erealloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC 
ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = zend_mm_realloc_heap(AG(mm_heap), ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API void* ZEND_FASTCALL _erealloc2(void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = zend_mm_realloc_heap(AG(mm_heap), ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API size_t ZEND_FASTCALL _zend_mem_block_size(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { - return _zend_mm_block_size(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + size_t ret = _zend_mm_block_size(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } ZEND_API void* ZEND_FASTCALL _safe_emalloc(size_t nmemb, size_t size, size_t offset ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -2880,6 +3352,7 @@ ZEND_API char* ZEND_FASTCALL zend_strndup(const char *s, size_t length) ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) { #if ZEND_MM_LIMIT + 
ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); zend_mm_heap *heap = AG(mm_heap); if (UNEXPECTED(memory_limit < heap->real_size)) { @@ -2887,16 +3360,20 @@ ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) /* free some cached chunks to fit into new memory limit */ do { zend_mm_chunk *p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); heap->cached_chunks_count--; heap->real_size -= ZEND_MM_CHUNK_SIZE; } while (memory_limit < heap->real_size); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return SUCCESS; } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return FAILURE; } AG(mm_heap)->limit = memory_limit; + ZEND_MM_POISON_HEAP(AG(mm_heap)); #endif return SUCCESS; } @@ -2904,7 +3381,10 @@ ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) ZEND_API bool zend_alloc_in_memory_limit_error_reporting(void) { #if ZEND_MM_LIMIT - return AG(mm_heap)->overflow; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = AG(mm_heap)->overflow; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return false; #endif @@ -2913,11 +3393,17 @@ ZEND_API bool zend_alloc_in_memory_limit_error_reporting(void) ZEND_API size_t zend_memory_usage(bool real_usage) { #if ZEND_MM_STAT + size_t ret; if (real_usage) { - return AG(mm_heap)->real_size; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->real_size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } else { - size_t usage = AG(mm_heap)->size; - return usage; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } #endif return 0; @@ -2926,10 +3412,17 @@ ZEND_API size_t zend_memory_usage(bool real_usage) ZEND_API size_t zend_memory_peak_usage(bool real_usage) { #if ZEND_MM_STAT + size_t ret; if (real_usage) { - return AG(mm_heap)->real_peak; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->real_peak; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } else { - return 
AG(mm_heap)->peak; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->peak; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } #endif return 0; @@ -2938,8 +3431,10 @@ ZEND_API size_t zend_memory_peak_usage(bool real_usage) ZEND_API void zend_memory_reset_peak_usage(void) { #if ZEND_MM_STAT + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); AG(mm_heap)->real_peak = AG(mm_heap)->real_size; AG(mm_heap)->peak = AG(mm_heap)->size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); #endif } @@ -2989,6 +3484,7 @@ static zend_always_inline void tracked_check_limit(zend_mm_heap *heap, size_t ad static void *tracked_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); tracked_check_limit(heap, size); void *ptr = malloc(size); @@ -3000,6 +3496,7 @@ static void *tracked_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC #if ZEND_MM_STAT heap->size += size; #endif + ZEND_MM_POISON_HEAP(heap); return ptr; } @@ -3009,16 +3506,19 @@ static void tracked_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { } zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); zval *size_zv = tracked_get_size_zv(heap, ptr); #if ZEND_MM_STAT heap->size -= Z_LVAL_P(size_zv); #endif zend_hash_del_bucket(heap->tracked_allocs, (Bucket *) size_zv); + ZEND_MM_POISON_HEAP(heap); free(ptr); } static void *tracked_realloc(void *ptr, size_t new_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); zval *old_size_zv = NULL; size_t old_size = 0; if (ptr) { @@ -3040,6 +3540,7 @@ static void *tracked_realloc(void *ptr, size_t new_size ZEND_FILE_LINE_DC ZEND_F #if ZEND_MM_STAT heap->size += new_size - old_size; #endif + ZEND_MM_POISON_HEAP(heap); return ptr; } @@ -3055,28 +3556,34 @@ static void tracked_free_all(zend_mm_heap *heap) { static void* poison_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + 
ZEND_MM_UNPOISON_HEAP(heap); if (SIZE_MAX - heap->debug.padding * 2 < size) { zend_mm_panic("Integer overflow in memory allocation"); } - size += heap->debug.padding * 2; + size_t size_plus_padding = size + heap->debug.padding * 2; - void *ptr = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_heap(heap, size_plus_padding ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); if (EXPECTED(ptr)) { if (heap->debug.poison_alloc) { - memset(ptr, heap->debug.poison_alloc_value, size); + ZEND_MM_UNPOISON(ptr, size_plus_padding); + memset(ptr, heap->debug.poison_alloc_value, size_plus_padding); + ZEND_MM_POISON(ptr, size_plus_padding); } ptr = (char*)ptr + heap->debug.padding; + ZEND_MM_UNPOISON(ptr, size); } + ZEND_MM_POISON_HEAP(heap); return ptr; } static void poison_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); if (EXPECTED(ptr)) { /* zend_mm_shutdown() will try to free the heap when custom handlers @@ -3090,11 +3597,14 @@ static void poison_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) size_t size = zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); if (heap->debug.poison_free) { + ZEND_MM_UNPOISON(ptr, size); memset(ptr, heap->debug.poison_free_value, size); + ZEND_MM_POISON(ptr, size); } } zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); } static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -3103,6 +3613,7 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L void *new = poison_malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(heap); if (ptr) { /* Determine the size of the old allocation from the unpadded pointer. 
*/ size_t oldsize = zend_mm_size(heap, (char*)ptr - heap->debug.padding ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); @@ -3114,9 +3625,11 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L oldsize -= sizeof(zend_mm_debug_info); #endif + ZEND_MM_UNPOISON(ptr, MIN(oldsize, size)); memcpy(new, ptr, MIN(oldsize, size)); poison_free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } + ZEND_MM_POISON_HEAP(heap); return new; } @@ -3124,6 +3637,7 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L static size_t poison_gc(void) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); @@ -3131,19 +3645,21 @@ static size_t poison_gc(void) size_t (*_gc)(void); void (*_shutdown)(bool, bool); - zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); - zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); - - size_t collected = zend_mm_gc(heap); + _zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); + _zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + size_t collected = _zend_mm_gc(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + + ZEND_MM_POISON_HEAP(heap); return collected; } static void poison_shutdown(bool full, bool silent) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); @@ -3151,18 +3667,20 @@ static void poison_shutdown(bool full, bool silent) size_t (*_gc)(void); void (*_shutdown)(bool, bool); - zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); - 
zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); + _zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); + _zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); if (heap->debug.check_freelists_on_shutdown) { zend_mm_check_freelists(heap); } zend_mm_shutdown(heap, full, silent); + ZEND_MM_UNPOISON_HEAP(heap); if (!full) { - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); } + ZEND_MM_POISON_HEAP(heap); } static void poison_enable(zend_mm_heap *heap, char *parameters) @@ -3242,7 +3760,7 @@ static void poison_enable(zend_mm_heap *heap, char *parameters) tmp++; } - zend_mm_set_custom_handlers_ex(heap, poison_malloc, poison_free, + _zend_mm_set_custom_handlers_ex(heap, poison_malloc, poison_free, poison_realloc, poison_gc, poison_shutdown); } #endif @@ -3333,22 +3851,15 @@ ZEND_API zend_mm_heap *zend_mm_get_heap(void) ZEND_API bool zend_mm_is_custom_heap(zend_mm_heap *new_heap) { #if ZEND_MM_CUSTOM - return AG(mm_heap)->use_custom_heap; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = AG(mm_heap)->use_custom_heap; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return 0; #endif } -ZEND_API void zend_mm_set_custom_handlers(zend_mm_heap *heap, - void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) -{ -#if ZEND_MM_CUSTOM - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); -#endif -} - ZEND_API void zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), @@ -3356,29 +3867,20 @@ ZEND_API void zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, size_t (*_gc)(void), void (*_shutdown)(bool, 
bool)) { -#if ZEND_MM_CUSTOM - zend_mm_heap *_heap = (zend_mm_heap*)heap; - - if (!_malloc && !_free && !_realloc) { - _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_NONE; - } else { - _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD; - _heap->custom_heap._malloc = _malloc; - _heap->custom_heap._free = _free; - _heap->custom_heap._realloc = _realloc; - _heap->custom_heap._gc = _gc; - _heap->custom_heap._shutdown = _shutdown; - } -#endif + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + ZEND_MM_POISON_HEAP(heap); } -ZEND_API void zend_mm_get_custom_handlers(zend_mm_heap *heap, - void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) +ZEND_API void zend_mm_set_custom_handlers(zend_mm_heap *heap, + void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) { #if ZEND_MM_CUSTOM - zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_POISON_HEAP(heap); #endif } @@ -3389,43 +3891,30 @@ ZEND_API void zend_mm_get_custom_handlers_ex(zend_mm_heap *heap, size_t (**_gc)(void), void (**_shutdown)(bool, bool)) { -#if ZEND_MM_CUSTOM - zend_mm_heap *_heap = (zend_mm_heap*)heap; + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + ZEND_MM_POISON_HEAP(heap); +} - if (heap->use_custom_heap) { - *_malloc = _heap->custom_heap._malloc; - *_free = _heap->custom_heap._free; - *_realloc = _heap->custom_heap._realloc; - if (_gc != NULL) { - *_gc = _heap->custom_heap._gc; - } - if (_shutdown != NULL) { - *_shutdown = 
_heap->custom_heap._shutdown; - } - } else { - *_malloc = NULL; - *_free = NULL; - *_realloc = NULL; - if (_gc != NULL) { - *_gc = NULL; - } - if (_shutdown != NULL) { - *_shutdown = NULL; - } - } -#else - *_malloc = NULL; - *_free = NULL; - *_realloc = NULL; - *_gc = NULL; - *_shutdown = NULL; +ZEND_API void zend_mm_get_custom_handlers(zend_mm_heap *heap, + void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) +{ +#if ZEND_MM_CUSTOM + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_POISON_HEAP(heap); #endif } ZEND_API zend_mm_storage *zend_mm_get_storage(zend_mm_heap *heap) { #if ZEND_MM_STORAGE - return heap->storage; + ZEND_MM_UNPOISON_HEAP(heap); + zend_mm_storage *ret = heap->storage; + ZEND_MM_POISON_HEAP(heap); + return ret; #else return NULL; #endif @@ -3452,6 +3941,7 @@ ZEND_API zend_mm_heap *zend_mm_startup_ex(const zend_mm_handlers *handlers, void #endif return NULL; } + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap = &chunk->heap_slot; chunk->heap = heap; chunk->next = chunk; @@ -3503,6 +3993,7 @@ ZEND_API zend_mm_heap *zend_mm_startup_ex(const zend_mm_handlers *handlers, void } heap->storage = storage; heap->pid = getpid(); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return heap; #else return NULL; diff --git a/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt b/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt index 37061ba967eae..4f3f41fde8240 100644 --- a/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt +++ b/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt @@ -10,6 +10,15 @@ FPM\Tester::skipIfRoot(); require_once "tester.inc"; +// TODO: fix leak on shutdown caused by alloc_globals_dtor not getting invoked +$opts = getenv("ASAN_OPTIONS") ?: ''; +if ($opts) { + $opts .= ",detect_leaks=0"; +} else { + $opts = "detect_leaks=0"; +} 
+putenv("ASAN_OPTIONS=$opts"); + $cfg = << pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy