2009-07-03 06:48:23 +08:00
|
|
|
/*
|
2022-08-03 00:09:49 +08:00
|
|
|
* Copyright (c) 2006-2022, RT-Thread Development Team
|
2013-06-24 17:06:09 +08:00
|
|
|
*
|
2018-09-14 22:37:43 +08:00
|
|
|
* SPDX-License-Identifier: Apache-2.0
|
2009-07-03 06:48:23 +08:00
|
|
|
*
|
|
|
|
* Change Logs:
|
|
|
|
* Date Author Notes
|
2010-06-09 14:43:14 +08:00
|
|
|
* 2008-7-12 Bernard the first version
|
|
|
|
* 2010-06-09 Bernard fix the end stub of heap
|
|
|
|
* fix memory check in rt_realloc function
|
2010-07-13 15:36:37 +08:00
|
|
|
* 2010-07-13 Bernard fix RT_ALIGN issue found by kuronca
|
2010-10-14 17:38:03 +08:00
|
|
|
* 2010-10-14 Bernard fix rt_realloc issue when realloc a NULL pointer.
|
2017-08-14 15:57:42 +08:00
|
|
|
* 2017-07-14 armink fix rt_realloc issue when new size is 0
|
2018-10-26 06:35:42 +08:00
|
|
|
* 2018-10-02 Bernard Add 64bit support
|
2009-07-03 06:48:23 +08:00
|
|
|
*/
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Copyright (c) 2001-2004 Swedish Institute of Computer Science.
|
|
|
|
* All rights reserved.
|
|
|
|
*
|
|
|
|
* Redistribution and use in source and binary forms, with or without modification,
|
|
|
|
* are permitted provided that the following conditions are met:
|
|
|
|
*
|
|
|
|
* 1. Redistributions of source code must retain the above copyright notice,
|
|
|
|
* this list of conditions and the following disclaimer.
|
|
|
|
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
|
|
* this list of conditions and the following disclaimer in the documentation
|
|
|
|
* and/or other materials provided with the distribution.
|
|
|
|
* 3. The name of the author may not be used to endorse or promote products
|
|
|
|
* derived from this software without specific prior written permission.
|
|
|
|
*
|
|
|
|
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
|
|
|
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
|
|
|
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
|
|
|
|
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
|
|
|
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
|
|
|
|
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
|
|
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
|
|
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
|
|
|
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
|
|
|
|
* OF SUCH DAMAGE.
|
|
|
|
*
|
|
|
|
* This file is part of the lwIP TCP/IP stack.
|
|
|
|
*
|
|
|
|
* Author: Adam Dunkels <adam@sics.se>
|
|
|
|
* Simon Goldschmidt
|
|
|
|
*
|
|
|
|
*/
|
|
|
|
|
2011-09-08 10:17:18 +08:00
|
|
|
#include <rthw.h>
|
2009-07-03 06:48:23 +08:00
|
|
|
#include <rtthread.h>
|
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
#if defined (RT_USING_SMALL_MEM)
|
|
|
|
/**
|
|
|
|
* memory item on the small mem
|
|
|
|
*/
|
|
|
|
/*
 * Per-block header placed in front of every allocation in the heap.
 * Blocks are addressed by byte offsets from the heap base (next/prev),
 * not by pointers, so the same layout works on 32- and 64-bit targets.
 */
struct rt_small_mem_item
{
    /* Owning pool address with the used/free flag packed into the low bit:
     * see MEM_USED()/MEM_FREED()/MEM_ISUSED() below. */
    rt_ubase_t pool_ptr;                         /**< small memory object addr */
#ifdef ARCH_CPU_64BIT
    /* padding so the header size stays a multiple of the alignment on 64-bit */
    rt_uint32_t resv;
#endif /* ARCH_CPU_64BIT */
    rt_size_t next;                              /**< next free item (byte offset from heap_ptr) */
    rt_size_t prev;                              /**< prev free item (byte offset from heap_ptr) */
#ifdef RT_USING_MEMTRACE
#ifdef ARCH_CPU_64BIT
    rt_uint8_t thread[8];                        /**< thread name */
#else
    rt_uint8_t thread[4];                        /**< thread name */
#endif /* ARCH_CPU_64BIT */
#endif /* RT_USING_MEMTRACE */
};
|
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
/**
|
|
|
|
* Base structure of small memory object
|
|
|
|
*/
|
|
|
|
struct rt_small_mem
{
    struct rt_memory            parent;          /**< inherit from rt_memory */
    rt_uint8_t                 *heap_ptr;        /**< pointer to the heap */
    /* sentinel item at the very end of the heap; rt_smem_init() marks it
     * used so it is never merged or handed out */
    struct rt_small_mem_item   *heap_end;
    /* lowest-addressed free item; allocation scans start here */
    struct rt_small_mem_item   *lfree;
    rt_size_t                   mem_size_aligned; /**< aligned memory size */
};
|
2009-07-03 06:48:23 +08:00
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
#define HEAP_MAGIC 0x1ea0

/* minimum payload size of a block (keeps the free-list links addressable) */
#ifdef ARCH_CPU_64BIT
#define MIN_SIZE 24
#else
#define MIN_SIZE 12
#endif /* ARCH_CPU_64BIT */

/* mask that clears the low bit of pool_ptr, which encodes the used flag */
#define MEM_MASK ((~(rt_size_t)0) - 1)

/* NOTE: MEM_USED()/MEM_FREED() expand using a local variable named
 * 'small_mem' at the call site; they tag pool_ptr with the pool address
 * plus the used bit (0x1) or freed bit (0x0). */
#define MEM_USED() ((((rt_base_t)(small_mem)) & MEM_MASK) | 0x1)
#define MEM_FREED() ((((rt_base_t)(small_mem)) & MEM_MASK) | 0x0)
/* nonzero when the item's used bit is set */
#define MEM_ISUSED(_mem) \
    (((rt_base_t)(((struct rt_small_mem_item *)(_mem))->pool_ptr)) & (~MEM_MASK))
/* recover the owning pool from an item's tagged pool_ptr */
#define MEM_POOL(_mem) \
    ((struct rt_small_mem *)(((rt_base_t)(((struct rt_small_mem_item *)(_mem))->pool_ptr)) & (MEM_MASK)))
/* payload size of an item: distance to the next item minus the header */
#define MEM_SIZE(_heap, _mem) \
    (((struct rt_small_mem_item *)(_mem))->next - ((rt_ubase_t)(_mem) - \
    (rt_ubase_t)((_heap)->heap_ptr)) - RT_ALIGN(sizeof(struct rt_small_mem_item), RT_ALIGN_SIZE))

#define MIN_SIZE_ALIGNED     RT_ALIGN(MIN_SIZE, RT_ALIGN_SIZE)
#define SIZEOF_STRUCT_MEM    RT_ALIGN(sizeof(struct rt_small_mem_item), RT_ALIGN_SIZE)
|
2021-06-11 13:07:29 +08:00
|
|
|
|
2017-12-22 14:29:14 +08:00
|
|
|
#ifdef RT_USING_MEMTRACE
|
2021-12-16 16:23:58 +08:00
|
|
|
rt_inline void rt_smem_setname(struct rt_small_mem_item *mem, const char *name)
|
2017-12-22 14:29:14 +08:00
|
|
|
{
|
|
|
|
int index;
|
|
|
|
for (index = 0; index < sizeof(mem->thread); index ++)
|
|
|
|
{
|
|
|
|
if (name[index] == '\0') break;
|
|
|
|
mem->thread[index] = name[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
for (; index < sizeof(mem->thread); index ++)
|
|
|
|
{
|
|
|
|
mem->thread[index] = ' ';
|
|
|
|
}
|
|
|
|
}
|
2021-06-11 13:07:29 +08:00
|
|
|
#endif /* RT_USING_MEMTRACE */
|
2009-07-03 06:48:23 +08:00
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
/*
 * Coalesce the free item 'mem' with its neighbours when they are also free,
 * keeping m->lfree pointing at the lowest free item. Offsets in next/prev
 * are bytes relative to m->heap_ptr.
 */
static void plug_holes(struct rt_small_mem *m, struct rt_small_mem_item *mem)
{
    struct rt_small_mem_item *nmem;
    struct rt_small_mem_item *pmem;

    RT_ASSERT((rt_uint8_t *)mem >= m->heap_ptr);
    RT_ASSERT((rt_uint8_t *)mem < (rt_uint8_t *)m->heap_end);

    /* plug hole forward */
    nmem = (struct rt_small_mem_item *)&m->heap_ptr[mem->next];
    if (mem != nmem && !MEM_ISUSED(nmem) &&
        (rt_uint8_t *)nmem != (rt_uint8_t *)m->heap_end)
    {
        /* if mem->next is unused and not end of m->heap_ptr,
         * combine mem and mem->next
         */
        if (m->lfree == nmem)
        {
            m->lfree = mem;
        }
        /* absorbed item is no longer a valid header */
        nmem->pool_ptr = 0;
        mem->next = nmem->next;
        /* back-link of the item after the absorbed one now points at 'mem' */
        ((struct rt_small_mem_item *)&m->heap_ptr[nmem->next])->prev = (rt_uint8_t *)mem - m->heap_ptr;
    }

    /* plug hole backward */
    pmem = (struct rt_small_mem_item *)&m->heap_ptr[mem->prev];
    if (pmem != mem && !MEM_ISUSED(pmem))
    {
        /* if mem->prev is unused, combine mem and mem->prev */
        if (m->lfree == mem)
        {
            m->lfree = pmem;
        }
        mem->pool_ptr = 0;
        pmem->next = mem->next;
        ((struct rt_small_mem_item *)&m->heap_ptr[mem->next])->prev = (rt_uint8_t *)pmem - m->heap_ptr;
    }
}
|
|
|
|
|
|
|
|
/**
|
2021-12-16 16:23:58 +08:00
|
|
|
* @brief This function will initialize small memory management algorithm.
|
|
|
|
*
|
|
|
|
* @param name is the name of the small memory management object.
|
|
|
|
*
|
|
|
|
* @param begin_addr the beginning address of memory.
|
2009-07-03 06:48:23 +08:00
|
|
|
*
|
2021-12-16 16:23:58 +08:00
|
|
|
* @param size is the size of the memory.
|
2021-09-10 15:55:08 +08:00
|
|
|
*
|
2021-12-16 16:23:58 +08:00
|
|
|
* @return Return a pointer to the memory object. When the return value is RT_NULL, it means the init failed.
|
2009-07-03 06:48:23 +08:00
|
|
|
*/
|
2021-12-16 16:23:58 +08:00
|
|
|
rt_smem_t rt_smem_init(const char *name,
                       void *begin_addr,
                       rt_size_t size)
{
    struct rt_small_mem_item *mem;
    struct rt_small_mem *small_mem;
    rt_ubase_t start_addr, begin_align, end_align, mem_size;

    /* the rt_small_mem bookkeeping object lives at the (aligned) start of
     * the supplied region; the heap proper begins after it */
    small_mem = (struct rt_small_mem *)RT_ALIGN((rt_ubase_t)begin_addr, RT_ALIGN_SIZE);
    start_addr = (rt_ubase_t)small_mem + sizeof(*small_mem);
    begin_align = RT_ALIGN((rt_ubase_t)start_addr, RT_ALIGN_SIZE);
    end_align = RT_ALIGN_DOWN((rt_ubase_t)begin_addr + size, RT_ALIGN_SIZE);

    /* alignment addr: region must fit at least the two item headers
     * (first block + end sentinel) after alignment */
    if ((end_align > (2 * SIZEOF_STRUCT_MEM)) &&
        ((end_align - 2 * SIZEOF_STRUCT_MEM) >= start_addr))
    {
        /* calculate the aligned memory size */
        mem_size = end_align - begin_align - 2 * SIZEOF_STRUCT_MEM;
    }
    else
    {
        rt_kprintf("mem init, error begin address 0x%x, and end address 0x%x\n",
                   (rt_ubase_t)begin_addr, (rt_ubase_t)begin_addr + size);

        return RT_NULL;
    }

    rt_memset(small_mem, 0, sizeof(*small_mem));
    /* initialize small memory object */
    rt_object_init(&(small_mem->parent.parent), RT_Object_Class_Memory, name);
    small_mem->parent.algorithm = "small";
    small_mem->parent.address = begin_align;
    small_mem->parent.total = mem_size;
    small_mem->mem_size_aligned = mem_size;

    /* point to begin address of heap */
    small_mem->heap_ptr = (rt_uint8_t *)begin_align;

    RT_DEBUG_LOG(RT_DEBUG_MEM, ("mem init, heap begin address 0x%x, size %d\n",
                                (rt_ubase_t)small_mem->heap_ptr, small_mem->mem_size_aligned));

    /* initialize the start of the heap: one big free block covering it all */
    mem = (struct rt_small_mem_item *)small_mem->heap_ptr;
    mem->pool_ptr = MEM_FREED();
    mem->next = small_mem->mem_size_aligned + SIZEOF_STRUCT_MEM;
    mem->prev = 0;
#ifdef RT_USING_MEMTRACE
    rt_smem_setname(mem, "INIT");
#endif /* RT_USING_MEMTRACE */

    /* initialize the end of the heap: sentinel marked used so it is never
     * merged by plug_holes() nor handed out by rt_smem_alloc() */
    small_mem->heap_end = (struct rt_small_mem_item *)&small_mem->heap_ptr[mem->next];
    small_mem->heap_end->pool_ptr = MEM_USED();
    small_mem->heap_end->next = small_mem->mem_size_aligned + SIZEOF_STRUCT_MEM;
    small_mem->heap_end->prev = small_mem->mem_size_aligned + SIZEOF_STRUCT_MEM;
#ifdef RT_USING_MEMTRACE
    rt_smem_setname(small_mem->heap_end, "INIT");
#endif /* RT_USING_MEMTRACE */

    /* initialize the lowest-free pointer to the start of the heap */
    small_mem->lfree = (struct rt_small_mem_item *)small_mem->heap_ptr;

    return &small_mem->parent;
}
RTM_EXPORT(rt_smem_init);
|
|
|
|
|
|
|
|
/**
|
|
|
|
* @brief This function will remove a small mem from the system.
|
|
|
|
*
|
|
|
|
* @param m the small memory management object.
|
|
|
|
*
|
|
|
|
* @return RT_EOK
|
|
|
|
*/
|
|
|
|
rt_err_t rt_smem_detach(rt_smem_t m)
{
    /* must be a valid, statically initialized memory object */
    RT_ASSERT(m != RT_NULL);
    RT_ASSERT(rt_object_get_type(&m->parent) == RT_Object_Class_Memory);
    RT_ASSERT(rt_object_is_systemobject(&m->parent));

    /* remove the object from the kernel object container; the heap
     * storage itself is owned by the caller and is left untouched */
    rt_object_detach(&m->parent);

    return RT_EOK;
}
RTM_EXPORT(rt_smem_detach);
|
2009-07-03 06:48:23 +08:00
|
|
|
|
|
|
|
/**
|
|
|
|
* @addtogroup MM
|
|
|
|
*/
|
|
|
|
|
2016-08-19 10:11:38 +08:00
|
|
|
/**@{*/
|
2009-07-03 06:48:23 +08:00
|
|
|
|
|
|
|
/**
|
2021-09-10 15:55:08 +08:00
|
|
|
* @brief Allocate a block of memory with a minimum of 'size' bytes.
|
2009-07-03 06:48:23 +08:00
|
|
|
*
|
2021-12-16 16:23:58 +08:00
|
|
|
* @param m the small memory management object.
|
|
|
|
*
|
2009-07-03 06:48:23 +08:00
|
|
|
* @param size is the minimum size of the requested block in bytes.
|
|
|
|
*
|
2021-09-11 16:40:56 +08:00
|
|
|
* @return the pointer to allocated memory or NULL if no free memory was found.
|
2009-07-03 06:48:23 +08:00
|
|
|
*/
|
2021-12-16 16:23:58 +08:00
|
|
|
void *rt_smem_alloc(rt_smem_t m, rt_size_t size)
{
    rt_size_t ptr, ptr2;
    struct rt_small_mem_item *mem, *mem2;
    struct rt_small_mem *small_mem;

    if (size == 0)
        return RT_NULL;

    RT_ASSERT(m != RT_NULL);
    RT_ASSERT(rt_object_get_type(&m->parent) == RT_Object_Class_Memory);
    RT_ASSERT(rt_object_is_systemobject(&m->parent));

    if (size != RT_ALIGN(size, RT_ALIGN_SIZE))
    {
        RT_DEBUG_LOG(RT_DEBUG_MEM, ("malloc size %d, but align to %d\n",
                                    size, RT_ALIGN(size, RT_ALIGN_SIZE)));
    }
    else
    {
        RT_DEBUG_LOG(RT_DEBUG_MEM, ("malloc size %d\n", size));
    }

    small_mem = (struct rt_small_mem *)m;
    /* alignment size */
    size = RT_ALIGN(size, RT_ALIGN_SIZE);

    /* every data block must be at least MIN_SIZE_ALIGNED long */
    if (size < MIN_SIZE_ALIGNED)
        size = MIN_SIZE_ALIGNED;

    if (size > small_mem->mem_size_aligned)
    {
        RT_DEBUG_LOG(RT_DEBUG_MEM, ("no memory\n"));

        return RT_NULL;
    }

    /* first-fit scan over the item list starting at the lowest free item;
     * 'ptr' is a byte offset from heap_ptr */
    for (ptr = (rt_uint8_t *)small_mem->lfree - small_mem->heap_ptr;
         ptr <= small_mem->mem_size_aligned - size;
         ptr = ((struct rt_small_mem_item *)&small_mem->heap_ptr[ptr])->next)
    {
        mem = (struct rt_small_mem_item *)&small_mem->heap_ptr[ptr];

        if ((!MEM_ISUSED(mem)) && (mem->next - (ptr + SIZEOF_STRUCT_MEM)) >= size)
        {
            /* mem is not used and at least perfect fit is possible:
             * mem->next - (ptr + SIZEOF_STRUCT_MEM) gives us the 'user data size' of mem */

            if (mem->next - (ptr + SIZEOF_STRUCT_MEM) >=
                (size + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED))
            {
                /* (in addition to the above, we test if another struct rt_small_mem_item (SIZEOF_STRUCT_MEM) containing
                 * at least MIN_SIZE_ALIGNED of data also fits in the 'user data space' of 'mem')
                 * -> split large block, create empty remainder,
                 * remainder must be large enough to contain MIN_SIZE_ALIGNED data: if
                 * mem->next - (ptr + (2*SIZEOF_STRUCT_MEM)) == size,
                 * struct rt_small_mem_item would fit in but no data between mem2 and mem2->next
                 * @todo we could leave out MIN_SIZE_ALIGNED. We would create an empty
                 * region that couldn't hold data, but when mem->next gets freed,
                 * the 2 regions would be combined, resulting in more free memory
                 */
                ptr2 = ptr + SIZEOF_STRUCT_MEM + size;

                /* create mem2 struct */
                mem2 = (struct rt_small_mem_item *)&small_mem->heap_ptr[ptr2];
                mem2->pool_ptr = MEM_FREED();
                mem2->next = mem->next;
                mem2->prev = ptr;
#ifdef RT_USING_MEMTRACE
                rt_smem_setname(mem2, " ");
#endif /* RT_USING_MEMTRACE */

                /* and insert it between mem and mem->next */
                mem->next = ptr2;

                /* fix the back-link of the following item unless mem2 is
                 * the last payload block (its next points at the sentinel) */
                if (mem2->next != small_mem->mem_size_aligned + SIZEOF_STRUCT_MEM)
                {
                    ((struct rt_small_mem_item *)&small_mem->heap_ptr[mem2->next])->prev = ptr2;
                }
                small_mem->parent.used += (size + SIZEOF_STRUCT_MEM);
                if (small_mem->parent.max < small_mem->parent.used)
                    small_mem->parent.max = small_mem->parent.used;
            }
            else
            {
                /* (a mem2 struct does no fit into the user data space of mem and mem->next will always
                 * be used at this point: if not we have 2 unused structs in a row, plug_holes should have
                 * take care of this).
                 * -> near fit or excact fit: do not split, no mem2 creation
                 * also can't move mem->next directly behind mem, since mem->next
                 * will always be used at this point!
                 */
                small_mem->parent.used += mem->next - ((rt_uint8_t *)mem - small_mem->heap_ptr);
                if (small_mem->parent.max < small_mem->parent.used)
                    small_mem->parent.max = small_mem->parent.used;
            }
            /* set small memory object */
            mem->pool_ptr = MEM_USED();
#ifdef RT_USING_MEMTRACE
            if (rt_thread_self())
                rt_smem_setname(mem, rt_thread_self()->parent.name);
            else
                rt_smem_setname(mem, "NONE");
#endif /* RT_USING_MEMTRACE */

            if (mem == small_mem->lfree)
            {
                /* Find next free block after mem and update lowest free pointer */
                while (MEM_ISUSED(small_mem->lfree) && small_mem->lfree != small_mem->heap_end)
                    small_mem->lfree = (struct rt_small_mem_item *)&small_mem->heap_ptr[small_mem->lfree->next];

                RT_ASSERT(((small_mem->lfree == small_mem->heap_end) || (!MEM_ISUSED(small_mem->lfree))));
            }
            RT_ASSERT((rt_ubase_t)mem + SIZEOF_STRUCT_MEM + size <= (rt_ubase_t)small_mem->heap_end);
            RT_ASSERT((rt_ubase_t)((rt_uint8_t *)mem + SIZEOF_STRUCT_MEM) % RT_ALIGN_SIZE == 0);
            RT_ASSERT((((rt_ubase_t)mem) & (RT_ALIGN_SIZE - 1)) == 0);

            RT_DEBUG_LOG(RT_DEBUG_MEM,
                         ("allocate memory at 0x%x, size: %d\n",
                          (rt_ubase_t)((rt_uint8_t *)mem + SIZEOF_STRUCT_MEM),
                          (rt_ubase_t)(mem->next - ((rt_uint8_t *)mem - small_mem->heap_ptr))));

            /* return the memory data except mem struct */
            return (rt_uint8_t *)mem + SIZEOF_STRUCT_MEM;
        }
    }

    return RT_NULL;
}
RTM_EXPORT(rt_smem_alloc);
|
2009-07-03 06:48:23 +08:00
|
|
|
|
|
|
|
/**
|
2021-09-11 16:40:56 +08:00
|
|
|
* @brief This function will change the size of previously allocated memory block.
|
2009-07-03 06:48:23 +08:00
|
|
|
*
|
2021-12-16 16:23:58 +08:00
|
|
|
* @param m the small memory management object.
|
|
|
|
*
|
|
|
|
* @param rmem is the pointer to memory allocated by rt_mem_alloc.
|
2009-12-25 20:18:53 +08:00
|
|
|
*
|
2021-09-11 16:40:56 +08:00
|
|
|
* @param newsize is the required new size.
|
2021-09-10 15:55:08 +08:00
|
|
|
*
|
|
|
|
* @return the changed memory block address.
|
2009-07-03 06:48:23 +08:00
|
|
|
*/
|
2021-12-16 16:23:58 +08:00
|
|
|
void *rt_smem_realloc(rt_smem_t m, void *rmem, rt_size_t newsize)
{
    rt_size_t size;
    rt_size_t ptr, ptr2;
    struct rt_small_mem_item *mem, *mem2;
    struct rt_small_mem *small_mem;
    void *nmem;

    RT_ASSERT(m != RT_NULL);
    RT_ASSERT(rt_object_get_type(&m->parent) == RT_Object_Class_Memory);
    RT_ASSERT(rt_object_is_systemobject(&m->parent));

    small_mem = (struct rt_small_mem *)m;
    /* alignment size */
    newsize = RT_ALIGN(newsize, RT_ALIGN_SIZE);
    if (newsize > small_mem->mem_size_aligned)
    {
        RT_DEBUG_LOG(RT_DEBUG_MEM, ("realloc: out of memory\n"));

        return RT_NULL;
    }
    else if (newsize == 0)
    {
        /* realloc to zero frees the block (C realloc semantics) */
        rt_smem_free(rmem);
        return RT_NULL;
    }

    /* allocate a new memory block */
    if (rmem == RT_NULL)
        return rt_smem_alloc(&small_mem->parent, newsize);

    RT_ASSERT((((rt_ubase_t)rmem) & (RT_ALIGN_SIZE - 1)) == 0);
    RT_ASSERT((rt_uint8_t *)rmem >= (rt_uint8_t *)small_mem->heap_ptr);
    RT_ASSERT((rt_uint8_t *)rmem < (rt_uint8_t *)small_mem->heap_end);

    /* step back from the payload to its item header */
    mem = (struct rt_small_mem_item *)((rt_uint8_t *)rmem - SIZEOF_STRUCT_MEM);

    /* current memory block size */
    ptr = (rt_uint8_t *)mem - small_mem->heap_ptr;
    size = mem->next - ptr - SIZEOF_STRUCT_MEM;
    if (size == newsize)
    {
        /* the size is the same as */
        return rmem;
    }

    /* shrink in place only when the tail is big enough to become a
     * free block of its own (header + MIN_SIZE payload) */
    if (newsize + SIZEOF_STRUCT_MEM + MIN_SIZE < size)
    {
        /* split memory block */
        small_mem->parent.used -= (size - newsize);

        ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize;
        mem2 = (struct rt_small_mem_item *)&small_mem->heap_ptr[ptr2];
        mem2->pool_ptr = MEM_FREED();
        mem2->next = mem->next;
        mem2->prev = ptr;
#ifdef RT_USING_MEMTRACE
        rt_smem_setname(mem2, " ");
#endif /* RT_USING_MEMTRACE */
        mem->next = ptr2;
        if (mem2->next != small_mem->mem_size_aligned + SIZEOF_STRUCT_MEM)
        {
            ((struct rt_small_mem_item *)&small_mem->heap_ptr[mem2->next])->prev = ptr2;
        }

        if (mem2 < small_mem->lfree)
        {
            /* the splited struct is now the lowest */
            small_mem->lfree = mem2;
        }

        /* merge the new tail block with a free successor, if any */
        plug_holes(small_mem, mem2);

        return rmem;
    }

    /* expand memory: allocate a fresh block, copy, release the old one */
    nmem = rt_smem_alloc(&small_mem->parent, newsize);
    if (nmem != RT_NULL) /* check memory */
    {
        rt_memcpy(nmem, rmem, size < newsize ? size : newsize);
        rt_smem_free(rmem);
    }

    /* NOTE: on allocation failure the original block is left intact and
     * RT_NULL is returned */
    return nmem;
}
RTM_EXPORT(rt_smem_realloc);
|
2009-07-03 06:48:23 +08:00
|
|
|
|
|
|
|
/**
|
2021-09-10 15:55:08 +08:00
|
|
|
* @brief This function will release the previously allocated memory block by
|
2021-12-16 16:23:58 +08:00
|
|
|
* rt_mem_alloc. The released memory block is taken back to system heap.
|
2009-07-03 06:48:23 +08:00
|
|
|
*
|
2021-09-11 16:40:56 +08:00
|
|
|
* @param rmem the address of memory which will be released.
|
2009-07-03 06:48:23 +08:00
|
|
|
*/
|
2021-12-16 16:23:58 +08:00
|
|
|
void rt_smem_free(void *rmem)
{
    struct rt_small_mem_item *mem;
    struct rt_small_mem *small_mem;

    /* freeing NULL is a no-op, like free(NULL) */
    if (rmem == RT_NULL)
        return;

    RT_ASSERT((((rt_ubase_t)rmem) & (RT_ALIGN_SIZE - 1)) == 0);

    /* Get the corresponding struct rt_small_mem_item ... */
    mem = (struct rt_small_mem_item *)((rt_uint8_t *)rmem - SIZEOF_STRUCT_MEM);
    /* ... which has to be in a used state ... */
    /* the owning pool is recovered from the tag stored in pool_ptr */
    small_mem = MEM_POOL(mem);
    RT_ASSERT(small_mem != RT_NULL);
    RT_ASSERT(MEM_ISUSED(mem));
    RT_ASSERT(rt_object_get_type(&small_mem->parent.parent) == RT_Object_Class_Memory);
    RT_ASSERT(rt_object_is_systemobject(&small_mem->parent.parent));
    RT_ASSERT((rt_uint8_t *)rmem >= (rt_uint8_t *)small_mem->heap_ptr &&
              (rt_uint8_t *)rmem < (rt_uint8_t *)small_mem->heap_end);
    RT_ASSERT(MEM_POOL(&small_mem->heap_ptr[mem->next]) == small_mem);

    RT_DEBUG_LOG(RT_DEBUG_MEM,
                 ("release memory 0x%x, size: %d\n",
                  (rt_ubase_t)rmem,
                  (rt_ubase_t)(mem->next - ((rt_uint8_t *)mem - small_mem->heap_ptr))));

    /* ... and is now unused. */
    mem->pool_ptr = MEM_FREED();
#ifdef RT_USING_MEMTRACE
    rt_smem_setname(mem, " ");
#endif /* RT_USING_MEMTRACE */

    if (mem < small_mem->lfree)
    {
        /* the newly freed struct is now the lowest */
        small_mem->lfree = mem;
    }

    /* account for header plus payload being returned to the pool */
    small_mem->parent.used -= (mem->next - ((rt_uint8_t *)mem - small_mem->heap_ptr));

    /* finally, see if prev or next are free also */
    plug_holes(small_mem, mem);
}
RTM_EXPORT(rt_smem_free);
|
2009-12-25 20:18:53 +08:00
|
|
|
|
2009-07-03 06:48:23 +08:00
|
|
|
#ifdef RT_USING_FINSH
|
|
|
|
#include <finsh.h>
|
2012-12-25 16:27:29 +08:00
|
|
|
|
2017-12-22 14:29:14 +08:00
|
|
|
#ifdef RT_USING_MEMTRACE
|
2021-12-16 16:23:58 +08:00
|
|
|
/*
 * Shell command: walk every (or one named) small-memory object and verify
 * each item's offset is in range and its pool tag points back at the pool.
 * Runs with interrupts disabled to get a consistent snapshot.
 */
int memcheck(int argc, char *argv[])
{
    /* NOTE(review): 'position' is a signed int holding an unsigned offset;
     * on heaps larger than INT_MAX this could wrap — presumably fine for
     * small-memory configurations, verify on large heaps */
    int position;
    rt_base_t level;
    struct rt_small_mem_item *mem;
    struct rt_small_mem *m;
    struct rt_object_information *information;
    struct rt_list_node *node;
    struct rt_object *object;
    char *name;

    /* optional first argument selects a single memory object by name */
    name = argc > 1 ? argv[1] : RT_NULL;
    level = rt_hw_interrupt_disable();
    /* get mem object */
    information = rt_object_get_information(RT_Object_Class_Memory);
    for (node = information->object_list.next;
         node != &(information->object_list);
         node = node->next)
    {
        object = rt_list_entry(node, struct rt_object, list);
        /* find the specified object */
        if (name != RT_NULL && rt_strncmp(name, object->name, RT_NAME_MAX) != 0)
            continue;
        /* mem object */
        m = (struct rt_small_mem *)object;
        /* check mem: traverse items by their 'next' offsets */
        for (mem = (struct rt_small_mem_item *)m->heap_ptr; mem != m->heap_end; mem = (struct rt_small_mem_item *)&m->heap_ptr[mem->next])
        {
            position = (rt_ubase_t)mem - (rt_ubase_t)m->heap_ptr;
            if (position < 0) goto __exit;
            if (position > (int)m->mem_size_aligned) goto __exit;
            if (MEM_POOL(mem) != m) goto __exit;
        }
    }
    rt_hw_interrupt_enable(level);

    return 0;
__exit:
    /* corruption detected: report the offending item and re-enable IRQs */
    rt_kprintf("Memory block wrong:\n");
    rt_kprintf("   name: %s\n", m->parent.parent.name);
    rt_kprintf("address: 0x%08x\n", mem);
    rt_kprintf("   pool: 0x%04x\n", mem->pool_ptr);
    rt_kprintf("   size: %d\n", mem->next - position - SIZEOF_STRUCT_MEM);
    rt_hw_interrupt_enable(level);

    return 0;
}
MSH_CMD_EXPORT(memcheck, check memory data);
|
|
|
|
|
2018-02-24 16:10:44 +08:00
|
|
|
int memtrace(int argc, char **argv)
|
2017-12-22 14:29:14 +08:00
|
|
|
{
|
2021-12-16 16:23:58 +08:00
|
|
|
struct rt_small_mem_item *mem;
|
|
|
|
struct rt_small_mem *m;
|
|
|
|
struct rt_object_information *information;
|
|
|
|
struct rt_list_node *node;
|
|
|
|
struct rt_object *object;
|
|
|
|
char *name;
|
|
|
|
|
|
|
|
name = argc > 1 ? argv[1] : RT_NULL;
|
|
|
|
/* get mem object */
|
|
|
|
information = rt_object_get_information(RT_Object_Class_Memory);
|
|
|
|
for (node = information->object_list.next;
|
|
|
|
node != &(information->object_list);
|
|
|
|
node = node->next)
|
|
|
|
{
|
|
|
|
object = rt_list_entry(node, struct rt_object, list);
|
|
|
|
/* find the specified object */
|
|
|
|
if (name != RT_NULL && rt_strncmp(name, object->name, RT_NAME_MAX) != 0)
|
|
|
|
continue;
|
|
|
|
/* mem object */
|
|
|
|
m = (struct rt_small_mem *)object;
|
|
|
|
/* show memory information */
|
|
|
|
rt_kprintf("\nmemory heap address:\n");
|
|
|
|
rt_kprintf("name : %s\n", m->parent.parent.name);
|
|
|
|
rt_kprintf("total : 0x%d\n", m->parent.total);
|
|
|
|
rt_kprintf("used : 0x%d\n", m->parent.used);
|
|
|
|
rt_kprintf("max_used: 0x%d\n", m->parent.max);
|
|
|
|
rt_kprintf("heap_ptr: 0x%08x\n", m->heap_ptr);
|
|
|
|
rt_kprintf("lfree : 0x%08x\n", m->lfree);
|
|
|
|
rt_kprintf("heap_end: 0x%08x\n", m->heap_end);
|
|
|
|
rt_kprintf("\n--memory item information --\n");
|
|
|
|
for (mem = (struct rt_small_mem_item *)m->heap_ptr; mem != m->heap_end; mem = (struct rt_small_mem_item *)&m->heap_ptr[mem->next])
|
|
|
|
{
|
|
|
|
int size = MEM_SIZE(m, mem);
|
2017-12-22 14:29:14 +08:00
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
rt_kprintf("[0x%08x - ", mem);
|
|
|
|
if (size < 1024)
|
|
|
|
rt_kprintf("%5d", size);
|
|
|
|
else if (size < 1024 * 1024)
|
|
|
|
rt_kprintf("%4dK", size / 1024);
|
|
|
|
else
|
|
|
|
rt_kprintf("%4dM", size / (1024 * 1024));
|
2017-12-22 14:29:14 +08:00
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
rt_kprintf("] %c%c%c%c", mem->thread[0], mem->thread[1], mem->thread[2], mem->thread[3]);
|
|
|
|
if (MEM_POOL(mem) != m)
|
|
|
|
rt_kprintf(": ***\n");
|
|
|
|
else
|
|
|
|
rt_kprintf("\n");
|
|
|
|
}
|
2017-12-22 14:29:14 +08:00
|
|
|
}
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
MSH_CMD_EXPORT(memtrace, dump memory trace information);
|
2021-06-11 13:07:29 +08:00
|
|
|
#endif /* RT_USING_MEMTRACE */
|
|
|
|
#endif /* RT_USING_FINSH */
|
2017-12-22 14:29:14 +08:00
|
|
|
|
2021-12-16 16:23:58 +08:00
|
|
|
#endif /* defined (RT_USING_SMALL_MEM) */
|
2009-07-03 06:48:23 +08:00
|
|
|
|
2016-08-19 10:11:38 +08:00
|
|
|
/**@}*/
|