[Bug] Wrong MEM_POOL check in rt_smem_free
RT-Thread Version
master
Hardware Type/Architectures
all
Develop Toolchain
IAR
Describe the bug
In rt_smem_free() (file: src/mem.c), the following assertion is used:
RT_ASSERT(MEM_POOL(&small_mem->heap_ptr[mem->next]) == small_mem);
This assertion fires spuriously when freeing a block located right before the heap end (heap_end).
At this point in the free path, the code is actually dealing with the previous block (mem->prev), not mem->next.
Since the heap_end sentinel is always marked as USED, MEM_POOL(...) does not compare equal to small_mem, causing a false assertion failure.
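As a sketch of why the check can hit the sentinel (assuming the struct definitions from src/mem.c; this is an illustration, not the literal free-path code):

/* rt_smem_free() recovers the item header from the user pointer rmem */
struct rt_small_mem_item *mem =
    (struct rt_small_mem_item *)((rt_uint8_t *)rmem - SIZEOF_STRUCT_MEM);

/* For the last real block in the pool, mem->next holds the offset of the
 * heap_end sentinel, so &small_mem->heap_ptr[mem->next] lands on an item
 * that is permanently marked USED rather than on a normal allocation. */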
How to reproduce
- Initialize rt_smem with a small heap.
- Allocate and free a block close to the heap_end.
- Observe the assertion failure inside rt_smem_free (a minimal sketch of these steps follows the list).
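A minimal sketch of these steps (assuming RT_USING_SMALL_MEM is enabled; the function and heap names and the sizes below are illustrative placeholders, not the original test case):

#include <rtthread.h>

static rt_uint8_t repro_heap[256];

static void repro_smem_free(void)
{
    /* 1. Initialize rt_smem with a small heap. */
    rt_smem_t pool = rt_smem_init("repro", repro_heap, sizeof(repro_heap));

    /* 2. Allocate a block sized so that its next index lands on heap_end. */
    void *p = rt_smem_alloc(pool, 128);

    /* 3. Free it; per the report this trips the RT_ASSERT in rt_smem_free(). */
    rt_smem_free(p);

    rt_smem_detach(pool);
}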
Expected behavior
Memory should be freed correctly without triggering a false assertion.
Root cause
- The assertion checks mem->next, which may point to the sentinel heap_end.
- heap_end is always marked USED, so the MEM_POOL() comparison does not match small_mem.
- The check should instead validate mem->prev at this point (see the sketch after this list).
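A hedged sketch of the two neighbours involved (variable names are illustrative, not the literal src/mem.c code):

struct rt_small_mem_item *next_item =
    (struct rt_small_mem_item *)&small_mem->heap_ptr[mem->next];   /* may be the heap_end sentinel, always USED */
struct rt_small_mem_item *prev_item =
    (struct rt_small_mem_item *)&small_mem->heap_ptr[mem->prev];   /* an ordinary item inside the pool */

/* Per the report, only the previous item can be validated unconditionally: */
RT_ASSERT(MEM_POOL(prev_item) == small_mem);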
Proposed fix
Change the assertion to check mem->prev instead of mem->next. This aligns the implementation with the original lwIP small memory allocator.
--- a/src/mem.c
+++ b/src/mem.c
@@ void rt_smem_free(void *rmem)
- RT_ASSERT(MEM_POOL(&small_mem->heap_ptr[mem->next]) == small_mem);
+ RT_ASSERT(MEM_POOL(&small_mem->heap_ptr[mem->prev]) == small_mem);
Environment
- RT-Thread version: [master branch]
- Module: src/mem.c (small memory allocator)
- Target: [all]
Additional context
- Verified with test cases: the issue reproduces when allocating and freeing from a small heap.
- After the patch, the allocator works without the false assertion.
- Behavior now matches lwIP mem_free() / plug_holes() (a paraphrased excerpt follows this list).
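For comparison, a paraphrase of the neighbour handling in lwIP's plug_holes(), reconstructed from memory (names and details may not match the lwIP sources exactly): only the forward merge explicitly excludes the ram_end sentinel, while the backward merge goes through mem->prev.

struct mem *nmem, *pmem;

/* forward: skip the merge when the next item is the ram_end sentinel */
nmem = (struct mem *)(void *)&ram[mem->next];
if (mem != nmem && nmem->used == 0 && (u8_t *)nmem != (u8_t *)ram_end)
{
    /* combine mem and nmem */
}

/* backward: mem->prev names an item inside the heap, never ram_end */
pmem = (struct mem *)(void *)&ram[mem->prev];
if (pmem != mem && pmem->used == 0)
{
    /* combine pmem and mem */
}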
Hi @embedder71, I added the following code for testing under the IAR environment, but failed to reproduce your issue. Could you please provide your test case?
#include <rtthread.h>
#include <rthw.h>

/* Local copies of the private definitions from src/mem.c (they are not
 * exposed in a public header, so they are duplicated here for the test). */
struct rt_small_mem_item
{
    rt_uintptr_t pool_ptr;                      /**< small memory object addr */
    rt_size_t next;                             /**< next free item */
    rt_size_t prev;                             /**< prev free item */
#ifdef RT_USING_MEMTRACE
#ifdef ARCH_CPU_64BIT
    rt_uint8_t thread[8];                       /**< thread name */
#else
    rt_uint8_t thread[4];                       /**< thread name */
#endif /* ARCH_CPU_64BIT */
#endif /* RT_USING_MEMTRACE */
};
struct rt_small_mem
{
    struct rt_memory parent;                    /**< inherit from rt_memory */
    rt_uint8_t *heap_ptr;                       /**< pointer to the heap */
    struct rt_small_mem_item *heap_end;         /**< sentinel item at the end of the heap */
    struct rt_small_mem_item *lfree;            /**< pointer to the lowest free block */
    rt_size_t mem_size_aligned;                 /**< aligned memory size */
};
#define MIN_SIZE (sizeof(rt_uintptr_t) + sizeof(rt_size_t) + sizeof(rt_size_t))
#define MIN_SIZE_ALIGNED RT_ALIGN(MIN_SIZE, RT_ALIGN_SIZE)
#define SIZEOF_STRUCT_MEM RT_ALIGN(sizeof(struct rt_small_mem_item), RT_ALIGN_SIZE)
#ifdef RT_USING_SMALL_MEM
void test_smem_free_bug(void)
{
#define TEST_HEAP_SIZE (256)
    static rt_uint8_t test_heap[TEST_HEAP_SIZE];
    rt_smem_t mem_pool;
    void *ptr;

    // Initialize small memory management object
    mem_pool = rt_smem_init("test_heap", test_heap, TEST_HEAP_SIZE);
    if (mem_pool == RT_NULL)
    {
        rt_kprintf("Failed to initialize memory pool\n");
        return;
    }

    struct rt_small_mem *small_mem = (struct rt_small_mem *)mem_pool;
    rt_kprintf("Memory pool initialized successfully\n");
    rt_kprintf("Total heap size: %d bytes\n", TEST_HEAP_SIZE);
    rt_kprintf("Heap start: 0x%p\n", small_mem->heap_ptr);
    rt_kprintf("Heap end: 0x%p\n", small_mem->heap_end);
    rt_kprintf("Aligned memory size: %d bytes\n", small_mem->mem_size_aligned);

    // Calculate the offset of the heap end
    rt_size_t heap_end_offset = (rt_uint8_t *)small_mem->heap_end - small_mem->heap_ptr;
    rt_kprintf("Heap end offset: %d\n", heap_end_offset);

    // Try to allocate a block so that its "next" pointer exactly points to heap_end.
    // We need to find a suitable allocation size such that the allocated block's
    // next pointer equals heap_end_offset.
    // First, let's check the current free block information
    rt_kprintf("Initial free block: next=%d, prev=%d\n",
               small_mem->lfree->next, small_mem->lfree->prev);

    // Try to allocate a specific size block so it is adjacent to the heap end.
    // Compute the required allocation size: heap_end_offset - SIZEOF_STRUCT_MEM - current_offset
    rt_size_t current_offset = (rt_uint8_t *)small_mem->lfree - small_mem->heap_ptr;
    rt_size_t alloc_size = heap_end_offset - current_offset - 2 * SIZEOF_STRUCT_MEM;
    rt_kprintf("Current offset: %d\n", current_offset);
    rt_kprintf("Need to allocate: %d bytes\n", alloc_size);

    if (alloc_size > 0 && alloc_size < small_mem->mem_size_aligned)
    {
        ptr = rt_smem_alloc(mem_pool, alloc_size);
        if (ptr != RT_NULL)
        {
            rt_kprintf("Allocated block at 0x%p with size %d\n", ptr, alloc_size);

            // Check whether this block's next pointer points to heap_end
            struct rt_small_mem_item *mem = (struct rt_small_mem_item *)((rt_uint8_t *)ptr - SIZEOF_STRUCT_MEM);
            rt_kprintf("Block next pointer: %d\n", mem->next);
            rt_kprintf("Heap end offset: %d\n", heap_end_offset);

            if (mem->next == heap_end_offset)
            {
                rt_kprintf("SUCCESS: This block points directly to heap_end!\n");
                rt_kprintf("Freeing block (should trigger assertion failure)...\n");

                // This should trigger an assertion failure
                rt_smem_free(ptr);
                rt_kprintf("Block freed successfully\n");
            }
            else
            {
                rt_kprintf("Block does not point to heap_end\n");
                rt_smem_free(ptr);
            }
        }
        else
        {
            rt_kprintf("Failed to allocate block of size %d\n", alloc_size);
        }
    }
    else
    {
        rt_kprintf("Invalid allocation size: %d\n", alloc_size);
    }

    rt_smem_detach(mem_pool);
}
MSH_CMD_EXPORT(test_smem_free_bug, Test smem free bug near heap end);
#endif /* RT_USING_SMALL_MEM */