/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-06     Jesven       first version
 */
#include <rthw.h>
#include <rtthread.h>

#include <mmu.h>
#include <mm_aspace.h>
#include <ioremap.h>

/* Kernel virtual address window from which ioremap mappings are allocated. */
void *rt_ioremap_start;
size_t rt_ioremap_size;

#ifdef RT_USING_SMART
#include <lwp_mm.h>
#endif

#define DBG_TAG "mm.ioremap"
#define DBG_LVL DBG_LOG
#include <rtdbg.h>

/* Mapping type selecting the kernel MMU attribute used by _ioremap_type(). */
enum ioremap_type
{
    MM_AREA_TYPE_PHY,        /* used by rt_ioremap() and rt_ioremap_nocache() */
    MM_AREA_TYPE_PHY_WT,     /* used by rt_ioremap_wt() */
    MM_AREA_TYPE_PHY_CACHED  /* used by rt_ioremap_cached() */
};
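
/*
 * Map the physical range [paddr, paddr + size) into the kernel ioremap
 * window (rt_ioremap_start .. rt_ioremap_start + rt_ioremap_size) with the
 * MMU attribute selected by type.  The mapping is created on the page-aligned
 * base address, and the in-page offset of paddr is added back to the result.
 * Returns the mapped virtual address, or NULL on failure.
 */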
static void *_ioremap_type(void *paddr, size_t size, enum ioremap_type type)
{
    char *v_addr = NULL;
    size_t attr;
    size_t lo_off;
    int err;

    /* keep the in-page offset; the mapping itself is page aligned */
    lo_off = (rt_ubase_t)paddr & ARCH_PAGE_MASK;

    /* ask the aspace layer for a free range inside the ioremap window */
    struct rt_mm_va_hint hint = {
        .prefer = RT_NULL,
        .map_size = RT_ALIGN(size + lo_off, ARCH_PAGE_SIZE),
        .flags = 0,
        .limit_start = rt_ioremap_start,
        .limit_range_size = rt_ioremap_size,
    };

    switch (type)
    {
    case MM_AREA_TYPE_PHY:
        attr = MMU_MAP_K_DEVICE;
        break;
    case MM_AREA_TYPE_PHY_WT:
        attr = MMU_MAP_K_RW;
        break;
    case MM_AREA_TYPE_PHY_CACHED:
        attr = MMU_MAP_K_RWCB;
        break;
    default:
        return v_addr;
    }

    err = rt_aspace_map_phy(&rt_kernel_space, &hint, attr, MM_PA_TO_OFF(paddr), (void **)&v_addr);

    if (err)
    {
        LOG_W("IOREMAP 0x%lx failed %d\n", paddr, err);
        v_addr = NULL;
    }
    else
    {
        v_addr = v_addr + lo_off;
    }
    return v_addr;
}
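
/*
 * Early mapping hook used before the kernel address space is fully set up.
 * This weak default returns the physical address unchanged (NULL for a zero
 * size), so it is only valid where the range is already accessible as given,
 * e.g. under an identity mapping; a platform may override it.
 */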
rt_weak void *rt_ioremap_early(void *paddr, size_t size)
{
    if (!size)
    {
        return RT_NULL;
    }

    return paddr;
}
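
/* Map [paddr, paddr + size) as device (uncached) memory and return its kernel
 * virtual address, or NULL on failure. */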
void *rt_ioremap(void *paddr, size_t size)
{
    return _ioremap_type(paddr, size, MM_AREA_TYPE_PHY);
}
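
/* Identical to rt_ioremap(): an explicitly non-cacheable mapping of the given
 * physical range. */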
void *rt_ioremap_nocache(void *paddr, size_t size)
{
    return _ioremap_type(paddr, size, MM_AREA_TYPE_PHY);
}
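
/* Map the physical range for write-through style access
 * (MM_AREA_TYPE_PHY_WT). */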
void *rt_ioremap_wt(void *paddr, size_t size)
{
    return _ioremap_type(paddr, size, MM_AREA_TYPE_PHY_WT);
}
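
/* Map the physical range as normal, cacheable memory. */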
void *rt_ioremap_cached(void *paddr, size_t size)
{
    return _ioremap_type(paddr, size, MM_AREA_TYPE_PHY_CACHED);
}
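
/* Remove a mapping previously created by one of the rt_ioremap_* calls. */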
void rt_iounmap(volatile void *vaddr)
{
    rt_aspace_unmap(&rt_kernel_space, (void *)vaddr);
}
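
/*
 * Usage sketch (illustrative only; EXAMPLE_UART_PHY and the register index
 * are made-up values, not part of this module):
 *
 *     #define EXAMPLE_UART_PHY  0x10000000UL
 *
 *     volatile rt_uint32_t *regs;
 *
 *     regs = (volatile rt_uint32_t *)rt_ioremap((void *)EXAMPLE_UART_PHY, 0x1000);
 *     if (regs)
 *     {
 *         rt_uint32_t status = regs[1];   // read a device register
 *         (void)status;
 *         rt_iounmap(regs);               // drop the mapping when done
 *     }
 */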