feat: smart: user space context optimization

This patch optimizes the user-space context handling in the ARM64
architecture, specifically improving how the context is saved and
restored during system calls and interrupts. The changes make the
code more efficient and easier to maintain, while ensuring proper
preservation of user context during system transitions.

Changes:
- Introduced a parameter for context saving to improve flexibility.
- Replaced hardcoded stack-pointer operations with frame-relative
  references for better readability and code reuse (see the sketch
  below).
- Simplified context restoration, removing redundant save/restore
  round trips (including the floating-point registers).
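
The frame-relative pattern mentioned above can be sketched roughly as
follows. This is an illustration only, not code lifted from the diff;
FRAME_REG and the CONTEXT_OFFSET_* symbols mirror names that appear
below, while _svc_demo is a hypothetical label used purely for the
sketch:

    /*
     * Keep the exception-frame address in a callee-saved register instead
     * of addressing it through sp, so the saved user context stays
     * reachable no matter how sp moves afterwards.
     */
    #define FRAME_REG x19

    _svc_demo:
        stp  fp, lr, [sp, #-16]!   /* ordinary prologue */
        mov  fp, sp
        mov  FRAME_REG, x0         /* x0 = address of the saved user context */

        /* ... nested calls may push/pop and move sp here ... */

        /* reload syscall arguments from the frame, independent of sp */
        ldp  x0, x1, [FRAME_REG, #(CONTEXT_OFFSET_X0)]
        ldp  x2, x3, [FRAME_REG, #(CONTEXT_OFFSET_X2)]

        ldp  fp, lr, [sp], #16     /* ordinary epilogue */
        ret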

Signed-off-by: Shell <smokewood@qq.com>
Author:    Shell <smokewood@qq.com>  2024-09-14 10:52:46 +08:00
Committer: Meco Man
Commit:    9a27de92ae (parent 210cd71128)
5 changed files with 134 additions and 193 deletions

---- changed file 1 of 5 ----

@@ -17,6 +17,7 @@
 #include <rtconfig.h>
 #include <asm-generic.h>
 #include <asm-fpu.h>
+#include <vector_gcc.h>
 #include <armv8.h>
 #include <lwp_arch.h>
@@ -105,6 +106,7 @@ arch_get_user_sp:
 .global arch_clone_exit
 arch_fork_exit:
 arch_clone_exit:
+    mov x0, xzr
     b arch_syscall_exit
 /*
@@ -131,12 +133,31 @@ START_POINT(SVC_Handler)
     /* x0 is initial sp */
     mov sp, x0
+    bl _SVC_Handler
+    /* jump explictly, make this code position independant */
+    b arch_syscall_exit
+START_POINT_END(SVC_Handler)
+
+TRACE_SYMBOL(_SVC_Handler)
+#define FRAME_REG x19
+/**
+ * x0 -> frame_addr
+ */
+_SVC_Handler:
+    .local _SVC_Handler
+    stp fp, lr, [sp, -16]!
+    mov fp, sp
+    mov FRAME_REG, x0 /* save the value of frame address */
     msr daifclr, #3 /* enable interrupt */
     GET_THREAD_SELF x0
     bl lwp_user_setting_save
-    ldp x8, x9, [sp, #(CONTEXT_OFFSET_X8)]
+    ldp x8, x9, [FRAME_REG, #(CONTEXT_OFFSET_X8)]
     and x0, x8, #0xf000
     cmp x0, #0xe000
     beq arch_signal_quit
@@ -149,78 +170,46 @@ START_POINT(SVC_Handler)
     cmp x0, xzr
     mov x30, x0
     beq arch_syscall_exit
-    ldp x0, x1, [sp, #(CONTEXT_OFFSET_X0)]
-    ldp x2, x3, [sp, #(CONTEXT_OFFSET_X2)]
-    ldp x4, x5, [sp, #(CONTEXT_OFFSET_X4)]
-    ldp x6, x7, [sp, #(CONTEXT_OFFSET_X6)]
+    ldp x0, x1, [FRAME_REG, #(CONTEXT_OFFSET_X0)]
+    ldp x2, x3, [FRAME_REG, #(CONTEXT_OFFSET_X2)]
+    ldp x4, x5, [FRAME_REG, #(CONTEXT_OFFSET_X4)]
+    ldp x6, x7, [FRAME_REG, #(CONTEXT_OFFSET_X6)]
     blr x30
-    /* jump explictly, make this code position independant */
-    b arch_syscall_exit
-START_POINT_END(SVC_Handler)
-.global arch_syscall_exit
-arch_syscall_exit:
+    ldp fp, lr, [sp], 16
+    ret
 /**
- * @brief back up former x0 which is required to restart syscall, then setup
+ * void arch_syscall_exit(long rc)
+ */
+arch_syscall_exit:
+.global arch_syscall_exit
+/**
+ * backup former x0 which is required to restart syscall, then setup
  * syscall return value in stack frame
  */
     mov x1, sp
     bl arch_syscall_prepare_signal
+    /**
+     * disable local irq so we don't messup with the spsr_el1 witch is not saved
+     * for kernel space IRQ/EXCEPTION
+     */
     msr daifset, #3
-    ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */
-    msr spsr_el1, x3
-    msr elr_el1, x2
-    ldp x29, x30, [sp], #0x10
-    msr sp_el0, x29
-    ldp x28, x29, [sp], #0x10
-    msr fpcr, x28
-    msr fpsr, x29
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    RESTORE_FPU sp
+    b arch_ret_to_user
 /* the sp is reset to the outer most level, irq and fiq are disabled */
 START_POINT(arch_ret_to_user)
     msr daifset, #3
-    /* save exception frame */
-    SAVE_FPU sp
-    stp x0, x1, [sp, #-0x10]!
-    stp x2, x3, [sp, #-0x10]!
-    stp x4, x5, [sp, #-0x10]!
-    stp x6, x7, [sp, #-0x10]!
-    stp x8, x9, [sp, #-0x10]!
-    stp x10, x11, [sp, #-0x10]!
-    stp x12, x13, [sp, #-0x10]!
-    stp x14, x15, [sp, #-0x10]!
-    stp x16, x17, [sp, #-0x10]!
-    stp x18, x19, [sp, #-0x10]!
-    stp x20, x21, [sp, #-0x10]!
-    stp x22, x23, [sp, #-0x10]!
-    stp x24, x25, [sp, #-0x10]!
-    stp x26, x27, [sp, #-0x10]!
-    stp x28, x29, [sp, #-0x10]!
-    mrs x0, fpcr
-    mrs x1, fpsr
-    stp x0, x1, [sp, #-0x10]!
-    stp x29, x30, [sp, #-0x10]!
+    ldr x2, [sp, #CONTEXT_OFFSET_SP_EL0]
+    msr sp_el0, x2
+    ldr x2, [sp, #CONTEXT_OFFSET_ELR_EL1]
+    msr elr_el1, x2
+    ldr x3, [sp, #CONTEXT_OFFSET_SPSR_EL1]
+    msr spsr_el1, x3
     /* pre-action */
     bl lwp_check_debug
@@ -231,7 +220,8 @@ START_POINT(arch_ret_to_user)
     msr daifclr, #3
     mov x0, xzr
     b sys_exit
-1:
+1:  /* handling dbg */
     /* check if dbg ops exist */
     ldr x0, =rt_dbg_ops
     ldr x0, [x0]
@@ -243,104 +233,42 @@ START_POINT(arch_ret_to_user)
     orr x2, x2, x1
     msr spsr_el1, x2
     b 3f
-2:
+2:  /* clear software step */
     bic x2, x2, x1
     msr spsr_el1, x2
-3:
+3:  /* handling signal */
     /**
-     * push 2 dummy words to simulate a exception frame of interrupt
-     * Note: in kernel state, the context switch dont saved the context
+     * push updated spsr & elr to exception frame.
+     * Note: these 2 maybe updated after handling dbg
      */
     mrs x0, spsr_el1
+    str x0, [sp, #CONTEXT_OFFSET_SPSR_EL1]
     mrs x1, elr_el1
-    stp x1, x0, [sp, #-0x10]!
+    str x1, [sp, #CONTEXT_OFFSET_ELR_EL1]
     mov x0, sp
+    /* restore the thread execution environment */
     msr daifclr, #3
     bl lwp_thread_signal_catch
+    /* restore the exception-return exec-flow */
     msr daifset, #3
-    ldp x1, x0, [sp], #0x10
-    msr spsr_el1, x0
-    msr elr_el1, x1
     /* check debug */
-    /* restore exception frame */
-    ldp x29, x30, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    msr fpcr, x0
-    msr fpsr, x1
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    RESTORE_FPU sp
-    stp x0, x1, [sp, #-0x10]!
     ldr x0, =rt_dbg_ops
     ldr x0, [x0]
     cmp x0, xzr
-    ldp x0, x1, [sp], #0x10
     beq 1f
-    /* save */
-    SAVE_FPU sp
-    stp x0, x1, [sp, #-0x10]!
-    stp x2, x3, [sp, #-0x10]!
-    stp x4, x5, [sp, #-0x10]!
-    stp x6, x7, [sp, #-0x10]!
-    stp x8, x9, [sp, #-0x10]!
-    stp x10, x11, [sp, #-0x10]!
-    stp x12, x13, [sp, #-0x10]!
-    stp x14, x15, [sp, #-0x10]!
-    stp x16, x17, [sp, #-0x10]!
-    stp x18, x19, [sp, #-0x10]!
-    stp x20, x21, [sp, #-0x10]!
-    stp x22, x23, [sp, #-0x10]!
-    stp x24, x25, [sp, #-0x10]!
-    stp x26, x27, [sp, #-0x10]!
-    stp x28, x29, [sp, #-0x10]!
-    mrs x0, fpcr
-    mrs x1, fpsr
-    stp x0, x1, [sp, #-0x10]!
-    stp x29, x30, [sp, #-0x10]!
-    mrs x0, elr_el1
+    ldr x0, [sp, #CONTEXT_OFFSET_ELR_EL1]
     bl dbg_attach_req
-    /* restore */
-    ldp x29, x30, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    msr fpcr, x0
-    msr fpsr, x1
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    RESTORE_FPU sp
 1:
+    RESTORE_IRQ_CONTEXT_NO_SPEL0
     eret
 START_POINT_END(arch_ret_to_user)
@@ -410,32 +338,7 @@ arch_syscall_restart:
     /* restore previous exception frame */
     msr spsel, #0
-    ldp x2, x3, [sp], #0x10
-    msr elr_el1, x2
-    msr spsr_el1, x3
-    ldp x29, x30, [sp], #0x10
-    ldp x28, x29, [sp], #0x10
-    msr fpcr, x28
-    msr fpsr, x29
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    RESTORE_FPU sp
+    RESTORE_IRQ_CONTEXT_NO_SPEL0
     msr spsel, #1
@@ -443,8 +346,8 @@ arch_syscall_restart:
 arch_signal_quit:
-    /* drop current exception frame */
-    add sp, sp, #CONTEXT_SIZE
+    /* drop current exception frame & sigreturn */
+    add sp, sp, #(CONTEXT_SIZE + 0x10)
     mov x1, sp
     mrs x0, sp_el0
     bl arch_signal_ucontext_restore
@@ -460,35 +363,12 @@ arch_signal_quit:
     /* restore previous exception frame */
     msr spsel, #0
-    ldp x2, x3, [sp], #0x10
-    msr elr_el1, x2
-    msr spsr_el1, x3
-    ldp x29, x30, [sp], #0x10
-    ldp x28, x29, [sp], #0x10
-    msr fpcr, x28
-    msr fpsr, x29
-    ldp x28, x29, [sp], #0x10
-    ldp x26, x27, [sp], #0x10
-    ldp x24, x25, [sp], #0x10
-    ldp x22, x23, [sp], #0x10
-    ldp x20, x21, [sp], #0x10
-    ldp x18, x19, [sp], #0x10
-    ldp x16, x17, [sp], #0x10
-    ldp x14, x15, [sp], #0x10
-    ldp x12, x13, [sp], #0x10
-    ldp x10, x11, [sp], #0x10
-    ldp x8, x9, [sp], #0x10
-    ldp x6, x7, [sp], #0x10
-    ldp x4, x5, [sp], #0x10
-    ldp x2, x3, [sp], #0x10
-    ldp x0, x1, [sp], #0x10
-    RESTORE_FPU sp
+    RESTORE_IRQ_CONTEXT_NO_SPEL0
     msr spsel, #1
+    SAVE_IRQ_CONTEXT
     b arch_ret_to_user
 /**

---- changed file 2 of 5 ----

@@ -54,6 +54,52 @@
 #include "../up/context_gcc.h"
 #endif
+
+.macro RESTORE_IRQ_CONTEXT_NO_SPEL0
+    ldp x2, x3, [sp], #0x10
+    msr elr_el1, x2
+    msr spsr_el1, x3
+    ldp x29, x30, [sp], #0x10
+    ldp x28, x29, [sp], #0x10
+    msr fpcr, x28
+    msr fpsr, x29
+    ldp x28, x29, [sp], #0x10
+    ldp x26, x27, [sp], #0x10
+    ldp x24, x25, [sp], #0x10
+    ldp x22, x23, [sp], #0x10
+    ldp x20, x21, [sp], #0x10
+    ldp x18, x19, [sp], #0x10
+    ldp x16, x17, [sp], #0x10
+    ldp x14, x15, [sp], #0x10
+    ldp x12, x13, [sp], #0x10
+    ldp x10, x11, [sp], #0x10
+    ldp x8, x9, [sp], #0x10
+    ldp x6, x7, [sp], #0x10
+    ldp x4, x5, [sp], #0x10
+    ldp x2, x3, [sp], #0x10
+    ldp x0, x1, [sp], #0x10
+    RESTORE_FPU sp
+.endm
+
+.macro EXCEPTION_SWITCH, eframex, tmpx
+#ifdef RT_USING_SMART
+    /**
+     * test the spsr for execution level 0
+     * That is { PSTATE.[NZCV] := SPSR_EL1 & M.EL0t }
+     */
+    ldr \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
+    and \tmpx, \tmpx, 0x1f
+    cbz \tmpx, 1f
+    b 2f
+1:
+    b arch_ret_to_user
+2:
+#endif /* RT_USING_SMART */
+.endm
+
 .macro SAVE_USER_CTX, eframex, tmpx
 #ifdef RT_USING_SMART
     mrs \tmpx, spsr_el1

---- changed file 3 of 5 ----

@@ -19,17 +19,23 @@
 #include <asm-fpu.h>
 #include <armv8.h>
-.macro RESTORE_CONTEXT_SWITCH using_sp
-    /* Set the SP to point to the stack of the task being restored. */
-    mov sp, \using_sp
+/* restore address space */
+.macro RESTORE_ADDRESS_SPACE
 #ifdef RT_USING_SMART
     bl rt_thread_self
     mov x19, x0
     bl lwp_aspace_switch
     mov x0, x19
     bl lwp_user_setting_restore
-#endif /* RT_USING_SMART */
+#endif
+.endm
+
+.macro RESTORE_CONTEXT_SWITCH using_sp
+    /* Set the SP to point to the stack of the task being restored. */
+    mov sp, \using_sp
+    RESTORE_ADDRESS_SPACE
     _RESTORE_CONTEXT_SWITCH
 .endm

---- changed file 4 of 5 ----

@@ -28,7 +28,8 @@ vector_fiq:
     SAVE_IRQ_CONTEXT
     bl rt_hw_trap_fiq
-    RESTORE_IRQ_CONTEXT
+    b rt_hw_irq_exit
 .globl rt_thread_switch_interrupt_flag
 .globl rt_hw_context_switch_interrupt_do

---- changed file 5 of 5 ----

@@ -79,7 +79,11 @@ START_POINT(vector_exception)
     bl rt_hw_trap_exception
     RESTORE_USER_CTX EFRAMEX, x0
+    /* do exception switch for IRQ/exception handlers */
+    EXCEPTION_SWITCH sp, x0
     RESTORE_IRQ_CONTEXT
+    eret
 START_POINT_END(vector_exception)
 START_POINT(vector_serror)
@@ -123,4 +127,8 @@ START_POINT_END(vector_irq)
 rt_hw_irq_exit:
 .globl rt_hw_irq_exit
+    /* do exception switch for IRQ/exception handlers */
+    EXCEPTION_SWITCH sp, x0
     RESTORE_IRQ_CONTEXT
+    eret