Merge pull request #9212 from polarvid/shell/opt-gctx
[libcpu] trimming ARM64 general context
commit e955e411b7
@@ -106,18 +106,16 @@ int arch_set_thread_context(void (*exit)(void), void *new_thread_stack,
     struct rt_hw_exp_stack *ori_syscall = rt_thread_self()->user_ctx.ctx;
     RT_ASSERT(ori_syscall != RT_NULL);
 
-    thread_frame = (void *)((long)new_thread_stack - sizeof(struct rt_hw_exp_stack));
-    syscall_frame = (void *)((long)new_thread_stack - 2 * sizeof(struct rt_hw_exp_stack));
+    new_thread_stack = (rt_ubase_t*)RT_ALIGN_DOWN((rt_ubase_t)new_thread_stack, 16);
+    syscall_frame = (void *)((long)new_thread_stack - sizeof(struct rt_hw_exp_stack));
 
     memcpy(syscall_frame, ori_syscall, sizeof(*syscall_frame));
     syscall_frame->sp_el0 = (long)user_stack;
     syscall_frame->x0 = 0;
 
-    thread_frame->cpsr = ((3 << 6) | 0x4 | 0x1);
-    thread_frame->pc = (long)exit;
-    thread_frame->x0 = 0;
+    thread_frame = (void *)rt_hw_stack_init(exit, RT_NULL, (void *)syscall_frame, RT_NULL);
 
-    *thread_sp = syscall_frame;
+    *thread_sp = thread_frame;
 
     return 0;
 }
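A note on the resulting stack picture (not part of the patch): the new thread's kernel stack now carries a single exception frame for the syscall return, and the switch context below it is produced by rt_hw_stack_init() instead of being filled by hand:

    /*
     * high addresses  +---------------------------+ <- new_thread_stack (16-byte aligned)
     *                 | struct rt_hw_exp_stack    | <- syscall_frame, copied from the
     *                 | (syscall/user return)     |    original syscall context
     *                 +---------------------------+
     *                 | frame built by            | <- *thread_sp, consumed by the
     *                 | rt_hw_stack_init()        |    scheduler's context switch
     * low addresses   +---------------------------+
     */
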
@@ -125,12 +125,15 @@ lwp_exec_user:
  * since this routine reset the SP, we take it as a start point
  */
 START_POINT(SVC_Handler)
+    mov fp, xzr
+    mov lr, xzr
+
     /* x0 is initial sp */
     mov sp, x0
 
     msr daifclr, #3    /* enable interrupt */
 
-    bl rt_thread_self
+    GET_THREAD_SELF x0
     bl lwp_user_setting_save
 
     ldp x8, x9, [sp, #(CONTEXT_OFFSET_X8)]

@@ -85,11 +85,12 @@ rt_err_t rt_hw_backtrace_frame_unwind(rt_thread_t thread, struct rt_hw_backtrace
     if (fp && !((long)fp & 0x7))
     {
 #ifdef RT_USING_SMART
+#define IN_USER_SPACE(addr) ((rt_ubase_t)(addr) >= USER_VADDR_START && (rt_ubase_t)(addr) < USER_VADDR_TOP)
         if (thread && thread->lwp && rt_scheduler_is_available())
         {
             rt_lwp_t lwp = thread->lwp;
             void *this_lwp = lwp_self();
-            if (this_lwp == lwp && rt_kmem_v2p(fp) != ARCH_MAP_FAILED)
+            if ((!IN_USER_SPACE(fp) || this_lwp == lwp) && rt_kmem_v2p(fp) != ARCH_MAP_FAILED)
             {
                 rc = _bt_kaddr(fp, frame);
             }

@@ -129,8 +130,8 @@ rt_err_t rt_hw_backtrace_frame_get(rt_thread_t thread, struct rt_hw_backtrace_fr
     }
     else
     {
-        frame->pc = ARCH_CONTEXT_FETCH(thread->sp, 3);
-        frame->fp = ARCH_CONTEXT_FETCH(thread->sp, 7);
+        frame->pc = ARCH_CONTEXT_FETCH(thread->sp, 0);
+        frame->fp = ARCH_CONTEXT_FETCH(thread->sp, 4);
         rc = RT_EOK;
     }
     return rc;
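The new indices follow from the trimmed switch frame introduced later in this patch: the word at the saved sp is x30 (used as the pc) and the word four slots up is x29 (the frame pointer). A sketch of that assumption, treating ARCH_CONTEXT_FETCH as a plain word index off thread->sp (the real macro lives in the port headers):

    /* hypothetical stand-in for ARCH_CONTEXT_FETCH, for illustration only */
    #define CONTEXT_FETCH_SKETCH(sp, idx)   (((unsigned long *)(sp))[idx])
    /* trimmed frame: [0] x30 (pc), [1] spsr, [2] fpcr, [3] fpsr, [4] x29 (fp), [5] sp_el0 */
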
@@ -23,4 +23,15 @@
     .cfi_endproc;   \
     .size name, .-name;
 
+.macro GET_THREAD_SELF, dst:req
+#ifdef ARCH_USING_HW_THREAD_SELF
+    mrs x0, tpidr_el1
+#else /* !ARCH_USING_HW_THREAD_SELF */
+    bl rt_thread_self
+#endif /* ARCH_USING_HW_THREAD_SELF */
+    .if \dst != x0
+        mov dst, x0
+    .endif
+.endm
+
 #endif /* __ASM_GENERIC_H__ */
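As used in the SVC_Handler hunk above, the macro yields the current thread either from tpidr_el1 (when ARCH_USING_HW_THREAD_SELF stashes the thread pointer there) or by falling back to rt_thread_self(). A hypothetical C-side counterpart of the hardware path, for illustration only:

    /* not part of the patch; assumes tpidr_el1 holds the current rt_thread pointer */
    static inline void *hw_thread_self_sketch(void)
    {
        void *thread;
        __asm__ volatile ("mrs %0, tpidr_el1" : "=r"(thread));
        return thread;      /* the value GET_THREAD_SELF leaves in its dst register */
    }
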
@@ -10,67 +10,61 @@
 #ifndef __ARM64_INC_CONTEXT_H__
 #define __ARM64_INC_CONTEXT_H__
 
-.macro SAVE_CONTEXT_SWITCH
+#include "armv8.h"
+
+.macro SAVE_CONTEXT_SWITCH, tmpx, tmp2x
     /* Save the entire context. */
     SAVE_FPU sp
-    stp x0, x1, [sp, #-0x10]!
-    stp x2, x3, [sp, #-0x10]!
-    stp x4, x5, [sp, #-0x10]!
-    stp x6, x7, [sp, #-0x10]!
-    stp x8, x9, [sp, #-0x10]!
-    stp x10, x11, [sp, #-0x10]!
-    stp x12, x13, [sp, #-0x10]!
-    stp x14, x15, [sp, #-0x10]!
-    stp x16, x17, [sp, #-0x10]!
-    stp x18, x19, [sp, #-0x10]!
-    stp x20, x21, [sp, #-0x10]!
-    stp x22, x23, [sp, #-0x10]!
-    stp x24, x25, [sp, #-0x10]!
-    stp x26, x27, [sp, #-0x10]!
-    stp x28, x29, [sp, #-0x10]!
-    mrs x28, fpcr
-    mrs x29, fpsr
-    stp x28, x29, [sp, #-0x10]!
-    mrs x29, sp_el0
-    stp x29, x30, [sp, #-0x10]!
+    stp x19, x20, [sp, #-0x10]!
+    stp x21, x22, [sp, #-0x10]!
+    stp x23, x24, [sp, #-0x10]!
+    stp x25, x26, [sp, #-0x10]!
+    stp x27, x28, [sp, #-0x10]!
+
+    mrs \tmpx, sp_el0
+    stp x29, \tmpx, [sp, #-0x10]!
+
+    mrs \tmpx, fpcr
+    mrs \tmp2x, fpsr
+    stp \tmpx, \tmp2x, [sp, #-0x10]!
+
+    mov \tmpx, #((3 << 6) | 0x5)        /* el1h, disable interrupt */
+    stp x30, \tmpx, [sp, #-0x10]!
+.endm
+
+.macro SAVE_CONTEXT_SWITCH_FAST
+    /* Save the entire context. */
+    add sp, sp, #-1 * CONTEXT_FPU_SIZE
+
+    add sp, sp, #-7 * 16
 
     mov x19, #((3 << 6) | 0x4 | 0x1)    /* el1h, disable interrupt */
-    mov x18, x30
-
-    stp x18, x19, [sp, #-0x10]!
+    stp lr, x19, [sp, #-0x10]!
 .endm
 
 .macro _RESTORE_CONTEXT_SWITCH
-    ldp x2, x3, [sp], #0x10             /* SPSR and ELR. */
-
-    tst x3, #0x1f
-    msr spsr_el1, x3
-    msr elr_el1, x2
+    ldp x30, x19, [sp], #0x10           /* SPSR and ELR. */
+    msr elr_el1, x30
+    msr spsr_el1, x19
 
-    ldp x29, x30, [sp], #0x10
-    msr sp_el0, x29
-    ldp x28, x29, [sp], #0x10
-    msr fpcr, x28
-    msr fpsr, x29
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
+    /* restore NEON */
+    ldp x19, x20, [sp], #0x10
+    msr fpcr, x19
+    msr fpsr, x20
+
+    ldp x29, x19, [sp], #0x10
+    msr sp_el0, x19
+    ldp x27, x28, [sp], #0x10
+    ldp x25, x26, [sp], #0x10
+    ldp x23, x24, [sp], #0x10
+    ldp x21, x22, [sp], #0x10
+    ldp x19, x20, [sp], #0x10
     RESTORE_FPU sp
-#ifdef RT_USING_SMART
-    beq arch_ret_to_user
-#endif
     eret
 .endm
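The push order above fixes the in-memory layout of the trimmed switch frame. A hypothetical C mirror of it (names are illustrative, not from the source; the NEON area saved by SAVE_FPU sits at higher addresses):

    /* what the saved thread sp points at, lowest address first */
    struct switch_frame_sketch
    {
        unsigned long x30;              /* return address of the switch  */
        unsigned long spsr;             /* el1h, interrupts masked       */
        unsigned long fpcr;
        unsigned long fpsr;
        unsigned long x29;              /* frame pointer                 */
        unsigned long sp_el0;
        unsigned long x27, x28;
        unsigned long x25, x26;
        unsigned long x23, x24;
        unsigned long x21, x22;
        unsigned long x19, x20;         /* callee-saved registers only   */
    };

Only AAPCS64 callee-saved state is kept; the same layout is what the backtrace indices above (pc at word 0, fp at word 4) and the 7 * 16-byte reservation plus the final lr/spsr push in SAVE_CONTEXT_SWITCH_FAST account for.
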
@@ -35,13 +35,17 @@
 rt_hw_context_switch_to:
     ldr x0, [x0]
     mov sp, x0
-    mov x0, x1
+
+    /* reserved to_thread */
+    mov x19, x1
+
+    mov x0, x19
     bl rt_cpus_lock_status_restore
 #ifdef RT_USING_SMART
-    bl rt_thread_self
+    mov x0, x19
     bl lwp_user_setting_restore
 #endif
-    b rt_hw_context_switch_exit
+    b _context_switch_exit
 
 .globl rt_hw_context_switch

|
@ -53,7 +57,7 @@ to, struct rt_thread *to_thread);
|
||||||
* X2 --> to_thread
|
* X2 --> to_thread
|
||||||
*/
|
*/
|
||||||
rt_hw_context_switch:
|
rt_hw_context_switch:
|
||||||
SAVE_CONTEXT_SWITCH
|
SAVE_CONTEXT_SWITCH x19, x20
|
||||||
mov x3, sp
|
mov x3, sp
|
||||||
str x3, [x0] // store sp in preempted tasks TCB
|
str x3, [x0] // store sp in preempted tasks TCB
|
||||||
ldr x0, [x1] // get new task stack pointer
|
ldr x0, [x1] // get new task stack pointer
|
||||||
|
@@ -68,10 +72,15 @@ rt_hw_context_switch:
     mov x0, x19
     bl lwp_user_setting_restore
 #endif
-    b rt_hw_context_switch_exit
+    b _context_switch_exit
 
+.globl rt_hw_irq_exit
 .globl rt_hw_context_switch_interrupt
 
+#define EXP_FRAME x19
+#define FROM_SPP  x20
+#define TO_SPP    x21
+#define TO_TCB    x22
 /*
  * void rt_hw_context_switch_interrupt(context, from sp, to sp, tp tcb)
  * X0 :interrupt context

@@ -80,30 +89,45 @@ rt_hw_context_switch:
  * X3 :to_thread's tcb
  */
 rt_hw_context_switch_interrupt:
-    stp x0, x1, [sp, #-0x10]!
-    stp x2, x3, [sp, #-0x10]!
+#ifdef RT_USING_DEBUG
+    /* debug frame for backtrace */
     stp x29, x30, [sp, #-0x10]!
-#ifdef RT_USING_SMART
-    bl rt_thread_self
-    bl lwp_user_setting_save
-#endif
-    ldp x29, x30, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    str x0, [x1]
-    ldr x0, [x2]
-    mov sp, x0
-    mov x0, x3
-    mov x19, x0
-    bl rt_cpus_lock_status_restore
-    mov x0, x19
-#ifdef RT_USING_SMART
-    bl lwp_user_setting_restore
-#endif
-    b rt_hw_context_switch_exit
+#endif /* RT_USING_DEBUG */
 
-.global rt_hw_context_switch_exit
-rt_hw_context_switch_exit:
-    clrex
+    /* we can discard all the previous ABI here */
+    mov EXP_FRAME, x0
+    mov FROM_SPP, x1
+    mov TO_SPP, x2
+    mov TO_TCB, x3
+
+#ifdef RT_USING_SMART
+    GET_THREAD_SELF x0
+    bl lwp_user_setting_save
+#endif /* RT_USING_SMART */
+
+    /* reset SP of from-thread */
+    mov sp, EXP_FRAME
+
+    /* push context for swtich */
+    adr lr, rt_hw_irq_exit
+    SAVE_CONTEXT_SWITCH_FAST
+
+    /* save SP of from-thread */
     mov x0, sp
+    str x0, [FROM_SPP]
+
+    /* setup SP to to-thread's */
+    ldr x0, [TO_SPP]
+    mov sp, x0
+
+    mov x0, TO_TCB
+    bl rt_cpus_lock_status_restore
+#ifdef RT_USING_SMART
+    mov x0, TO_TCB
+    bl lwp_user_setting_restore
+#endif /* RT_USING_SMART */
+    b _context_switch_exit
+
+_context_switch_exit:
+    clrex
     RESTORE_CONTEXT_SWITCH
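The rewritten routine is easiest to read as three steps (a summary of the hunk above, not text from the commit):

    /*
     * 1. Capture the arguments in callee-saved aliases (EXP_FRAME, FROM_SPP,
     *    TO_SPP, TO_TCB) and save the from-thread's lwp settings.
     * 2. Reset SP onto the from-thread's exception frame and push a minimal
     *    switch frame with SAVE_CONTEXT_SWITCH_FAST whose return address is
     *    rt_hw_irq_exit, so the thread resumes in the IRQ exit path later.
     * 3. Store SP through FROM_SPP, load the to-thread's SP from TO_SPP,
     *    restore the cpus lock and lwp settings, then run _context_switch_exit.
     */
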
@@ -20,16 +20,10 @@
 #include <armv8.h>
 
 .macro RESTORE_CONTEXT_SWITCH
-    /* Set the SP to point to the stack of the task being restored. */
-    mov sp, x0
-
     _RESTORE_CONTEXT_SWITCH
 .endm
 
 .macro RESTORE_IRQ_CONTEXT
-    /* Set the SP to point to the stack of the task being restored. */
-    mov sp, x0
-
     ldp x2, x3, [sp], #0x10    /* SPSR and ELR. */
 
     tst x3, #0x1f

@@ -15,10 +15,14 @@
 #include "../include/vector_gcc.h"
 #include "context_gcc.h"
 
+.section .text
+
 .globl vector_fiq
 vector_fiq:
     b .
 
+.globl rt_hw_irq_exit
+
 START_POINT(vector_irq)
     SAVE_IRQ_CONTEXT
     stp x0, x1, [sp, #-0x10]!    /* X0 is thread sp */

|
@ -42,7 +46,7 @@ START_POINT(vector_irq)
|
||||||
ldp x0, x1, [sp], #0x10
|
ldp x0, x1, [sp], #0x10
|
||||||
bl rt_scheduler_do_irq_switch
|
bl rt_scheduler_do_irq_switch
|
||||||
|
|
||||||
mov x0, sp
|
rt_hw_irq_exit:
|
||||||
RESTORE_IRQ_CONTEXT
|
RESTORE_IRQ_CONTEXT
|
||||||
|
|
||||||
START_POINT_END(vector_irq)
|
START_POINT_END(vector_irq)
|
||||||
|
|
|
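Read together with the RESTORE_IRQ_CONTEXT change above (the macro no longer reloads SP from x0), the IRQ exit path now expects SP to already point at the exception frame. rt_hw_irq_exit is also the return address that SAVE_CONTEXT_SWITCH_FAST records, so a preempted thread resumes here and unwinds its saved IRQ frame. A rough flow sketch (illustrative; the middle step is the usual scheduler path, not shown in this diff):

    /*
     * vector_irq -> rt_scheduler_do_irq_switch -> rt_hw_context_switch_interrupt
     *   (thread parked with its saved return address = rt_hw_irq_exit)
     * later resume: _context_switch_exit -> RESTORE_CONTEXT_SWITCH erets to
     *   rt_hw_irq_exit -> RESTORE_IRQ_CONTEXT -> eret to the interrupted code
     */
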
@@ -41,43 +41,21 @@ rt_uint8_t *rt_hw_stack_init(void *tentry, void *parameter,
         *(rt_uint128_t *)stk = (rt_uint128_t) { 0 };
     }
 
-    *(--stk) = (rt_ubase_t)0;             /* X1 */
-    *(--stk) = (rt_ubase_t)parameter;     /* X0 */
-    *(--stk) = (rt_ubase_t)3;             /* X3 */
-    *(--stk) = (rt_ubase_t)2;             /* X2 */
-    *(--stk) = (rt_ubase_t)5;             /* X5 */
-    *(--stk) = (rt_ubase_t)4;             /* X4 */
-    *(--stk) = (rt_ubase_t)7;             /* X7 */
-    *(--stk) = (rt_ubase_t)6;             /* X6 */
-    *(--stk) = (rt_ubase_t)9;             /* X9 */
-    *(--stk) = (rt_ubase_t)8;             /* X8 */
-    *(--stk) = (rt_ubase_t)11;            /* X11 */
-    *(--stk) = (rt_ubase_t)10;            /* X10 */
-    *(--stk) = (rt_ubase_t)13;            /* X13 */
-    *(--stk) = (rt_ubase_t)12;            /* X12 */
-    *(--stk) = (rt_ubase_t)15;            /* X15 */
-    *(--stk) = (rt_ubase_t)14;            /* X14 */
-    *(--stk) = (rt_ubase_t)17;            /* X17 */
-    *(--stk) = (rt_ubase_t)16;            /* X16 */
-    *(--stk) = (rt_ubase_t)tentry;        /* X19, 1st param */
-    *(--stk) = (rt_ubase_t)18;            /* X18 */
-    *(--stk) = (rt_ubase_t)21;            /* X21 */
     *(--stk) = (rt_ubase_t)texit;         /* X20, 2nd param */
-    *(--stk) = (rt_ubase_t)23;            /* X23 */
+    *(--stk) = (rt_ubase_t)tentry;        /* X19, 1st param */
     *(--stk) = (rt_ubase_t)22;            /* X22 */
-    *(--stk) = (rt_ubase_t)25;            /* X25 */
+    *(--stk) = (rt_ubase_t)parameter;     /* X21, 3rd param */
     *(--stk) = (rt_ubase_t)24;            /* X24 */
-    *(--stk) = (rt_ubase_t)27;            /* X27 */
+    *(--stk) = (rt_ubase_t)23;            /* X23 */
     *(--stk) = (rt_ubase_t)26;            /* X26 */
-    *(--stk) = (rt_ubase_t)0;             /* X29 - addr 0 as AAPCS64 specified */
+    *(--stk) = (rt_ubase_t)25;            /* X25 */
     *(--stk) = (rt_ubase_t)28;            /* X28 */
+    *(--stk) = (rt_ubase_t)27;            /* X27 */
+    *(--stk) = (rt_ubase_t)0;             /* sp_el0 */
+    *(--stk) = (rt_ubase_t)0;             /* X29 - addr 0 as AAPCS64 specified */
     *(--stk) = (rt_ubase_t)0;             /* FPSR */
     *(--stk) = (rt_ubase_t)0;             /* FPCR */
-    *(--stk) = (rt_ubase_t)0;             /* X30 - procedure call link register. */
-    *(--stk) = (rt_ubase_t)0;             /* sp_el0 */
-
-    *(--stk) = INITIAL_SPSR_EL1;
-
+    *(--stk) = INITIAL_SPSR_EL1;          /* Save Processor States */
     *(--stk) = (rt_ubase_t)_thread_start; /* Exception return address. */
 
     /* return task's current stack address */

@@ -21,6 +21,7 @@
 .section .text
 
 START_POINT(_thread_start)
+    mov x0, x21
     blr x19
     mov x29, #0
     blr x20
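With the trimmed rt_hw_stack_init() above seeding only callee-saved registers, the start trampoline now pulls the entry, exit, and parameter out of x19/x20/x21. A C-level equivalent (illustrative sketch; the real code is the assembly in this hunk):

    /* x19 = tentry, x20 = texit, x21 = parameter, as laid out by rt_hw_stack_init() */
    static void thread_start_sketch(void (*tentry)(void *), void (*texit)(void), void *parameter)
    {
        tentry(parameter);      /* mov x0, x21; blr x19 */
        texit();                /* mov x29, #0; blr x20 */
    }
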
@@ -44,7 +44,7 @@ rt_thread_switch_interrupt_flag:
 rt_hw_context_switch_to:
     clrex
     ldr x0, [x0]
-    RESTORE_CONTEXT_SWITCH
+    RESTORE_CONTEXT_SWITCH x0
 
 /*
  * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);

|
@ -55,23 +55,23 @@ rt_hw_context_switch_to:
|
||||||
.globl rt_hw_context_switch
|
.globl rt_hw_context_switch
|
||||||
rt_hw_context_switch:
|
rt_hw_context_switch:
|
||||||
clrex
|
clrex
|
||||||
SAVE_CONTEXT_SWITCH
|
SAVE_CONTEXT_SWITCH x19, x20
|
||||||
|
|
||||||
mov x2, sp
|
mov x2, sp
|
||||||
str x2, [x0] // store sp in preempted tasks TCB
|
str x2, [x0] // store sp in preempted tasks TCB
|
||||||
ldr x0, [x1] // get new task stack pointer
|
ldr x0, [x1] // get new task stack pointer
|
||||||
|
|
||||||
RESTORE_CONTEXT_SWITCH
|
RESTORE_CONTEXT_SWITCH x0
|
||||||
|
|
||||||
/*
|
|
||||||
* void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
|
|
||||||
*/
|
|
||||||
.globl rt_thread_switch_interrupt_flag
|
.globl rt_thread_switch_interrupt_flag
|
||||||
.globl rt_interrupt_from_thread
|
.globl rt_interrupt_from_thread
|
||||||
.globl rt_interrupt_to_thread
|
.globl rt_interrupt_to_thread
|
||||||
.globl rt_hw_context_switch_interrupt
|
.globl rt_hw_context_switch_interrupt
|
||||||
|
|
||||||
|
/*
|
||||||
|
* void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
|
||||||
|
*/
|
||||||
rt_hw_context_switch_interrupt:
|
rt_hw_context_switch_interrupt:
|
||||||
clrex
|
|
||||||
ldr x6, =rt_thread_switch_interrupt_flag
|
ldr x6, =rt_thread_switch_interrupt_flag
|
||||||
ldr x7, [x6]
|
ldr x7, [x6]
|
||||||
cmp x7, #1
|
cmp x7, #1
|
||||||
|
@ -95,3 +95,23 @@ _reswitch:
|
||||||
ldr x6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
|
ldr x6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
|
||||||
str x1, [x6]
|
str x1, [x6]
|
||||||
ret
|
ret
|
||||||
|
|
||||||
|
.globl rt_hw_context_switch_interrupt_do
|
||||||
|
|
||||||
|
/**
|
||||||
|
* rt_hw_context_switch_interrupt_do(void)
|
||||||
|
*/
|
||||||
|
rt_hw_context_switch_interrupt_do:
|
||||||
|
clrex
|
||||||
|
SAVE_CONTEXT_SWITCH_FAST
|
||||||
|
|
||||||
|
ldr x3, =rt_interrupt_from_thread
|
||||||
|
ldr x4, [x3]
|
||||||
|
mov x0, sp
|
||||||
|
str x0, [x4] // store sp in preempted tasks's tcb
|
||||||
|
|
||||||
|
ldr x3, =rt_interrupt_to_thread
|
||||||
|
ldr x4, [x3]
|
||||||
|
ldr x0, [x4] // get new task's stack pointer
|
||||||
|
|
||||||
|
RESTORE_CONTEXT_SWITCH x0
|
||||||
|
|
|
@@ -19,9 +19,9 @@
 #include <asm-fpu.h>
 #include <armv8.h>
 
-.macro RESTORE_CONTEXT_SWITCH
+.macro RESTORE_CONTEXT_SWITCH using_sp
     /* Set the SP to point to the stack of the task being restored. */
-    mov sp, x0
+    mov sp, \using_sp
 
 #ifdef RT_USING_SMART
     bl rt_thread_self

@@ -34,8 +34,6 @@
 .endm
 
 .macro RESTORE_IRQ_CONTEXT
-    /* Set the SP to point to the stack of the task being restored. */
-    MOV SP, X0
 #ifdef RT_USING_SMART
     BL rt_thread_self
     MOV X19, X0

@@ -26,9 +26,7 @@
 .globl vector_fiq
 vector_fiq:
     SAVE_IRQ_CONTEXT
-    stp x0, x1, [sp, #-0x10]!
     bl rt_hw_trap_fiq
-    ldp x0, x1, [sp], #0x10
     RESTORE_IRQ_CONTEXT
 
 .globl rt_interrupt_enter

|
@ -36,19 +34,17 @@ vector_fiq:
|
||||||
.globl rt_thread_switch_interrupt_flag
|
.globl rt_thread_switch_interrupt_flag
|
||||||
.globl rt_interrupt_from_thread
|
.globl rt_interrupt_from_thread
|
||||||
.globl rt_interrupt_to_thread
|
.globl rt_interrupt_to_thread
|
||||||
|
.globl rt_hw_context_switch_interrupt_do
|
||||||
|
|
||||||
.align 8
|
.align 8
|
||||||
.globl vector_irq
|
.globl vector_irq
|
||||||
vector_irq:
|
vector_irq:
|
||||||
SAVE_IRQ_CONTEXT
|
SAVE_IRQ_CONTEXT
|
||||||
stp x0, x1, [sp, #-0x10]! /* X0 is thread sp */
|
|
||||||
|
|
||||||
bl rt_interrupt_enter
|
bl rt_interrupt_enter
|
||||||
bl rt_hw_trap_irq
|
bl rt_hw_trap_irq
|
||||||
bl rt_interrupt_leave
|
bl rt_interrupt_leave
|
||||||
|
|
||||||
ldp x0, x1, [sp], #0x10
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* if rt_thread_switch_interrupt_flag set, jump to
|
* if rt_thread_switch_interrupt_flag set, jump to
|
||||||
* rt_hw_context_switch_interrupt_do and don't return
|
* rt_hw_context_switch_interrupt_do and don't return
|
||||||
|
@@ -61,15 +57,7 @@ vector_irq:
     mov x2, #0    // clear flag
     str x2, [x1]
 
-    ldr x3, =rt_interrupt_from_thread
-    ldr x4, [x3]
-    str x0, [x4]    // store sp in preempted tasks's tcb
-
-    ldr x3, =rt_interrupt_to_thread
-    ldr x4, [x3]
-    ldr x0, [x4]    // get new task's stack pointer
-
-    RESTORE_IRQ_CONTEXT
+    bl rt_hw_context_switch_interrupt_do
 
 vector_irq_exit:
     RESTORE_IRQ_CONTEXT_WITHOUT_MMU_SWITCH