diff --git a/libcpu/aarch64/common/include/armv8.h b/libcpu/aarch64/common/include/armv8.h index 9063a8337d..7388b078a2 100644 --- a/libcpu/aarch64/common/include/armv8.h +++ b/libcpu/aarch64/common/include/armv8.h @@ -11,6 +11,12 @@ #ifndef __ARMV8_H__ #define __ARMV8_H__ +#include <rtconfig.h> + +#ifdef ARCH_USING_HW_THREAD_SELF +#define ARM64_THREAD_REG tpidr_el1 +#endif /* ARCH_USING_HW_THREAD_SELF */ + #ifdef __ASSEMBLY__ /********************* diff --git a/libcpu/aarch64/common/include/cpuport.h b/libcpu/aarch64/common/include/cpuport.h index e5e256b57b..e5216f3b89 100644 --- a/libcpu/aarch64/common/include/cpuport.h +++ b/libcpu/aarch64/common/include/cpuport.h @@ -57,14 +57,14 @@ void _thread_start(void); rt_inline struct rt_thread *rt_hw_thread_self(void) { struct rt_thread *thread; - __asm__ volatile ("mrs %0, tpidr_el1":"=r"(thread)); + __asm__ volatile ("mrs %0, " RT_STRINGIFY(ARM64_THREAD_REG) :"=r"(thread)); return thread; } rt_inline void rt_hw_thread_set_self(struct rt_thread *thread) { - __asm__ volatile ("msr tpidr_el1, %0"::"r"(thread)); + __asm__ volatile ("msr " RT_STRINGIFY(ARM64_THREAD_REG) ", %0"::"r"(thread)); } #endif /* ARCH_USING_HW_THREAD_SELF */ diff --git a/libcpu/aarch64/common/mp/context_gcc.S b/libcpu/aarch64/common/mp/context_gcc.S index adc3587651..96743391bf 100644 --- a/libcpu/aarch64/common/mp/context_gcc.S +++ b/libcpu/aarch64/common/mp/context_gcc.S @@ -27,6 +27,12 @@ .globl rt_hw_context_switch_to +.macro update_tidr, srcx +#ifdef ARCH_USING_HW_THREAD_SELF + msr ARM64_THREAD_REG, \srcx +#endif /* ARCH_USING_HW_THREAD_SELF */ +.endm + /* * void rt_hw_context_switch_to(rt_uint3 to, struct rt_thread *to_thread); * X0 --> to (thread stack) @@ -35,6 +41,7 @@ rt_hw_context_switch_to: ldr x0, [x0] mov sp, x0 + update_tidr x1 /* reserved to_thread */ mov x19, x1 @@ -62,6 +69,7 @@ rt_hw_context_switch: str x3, [x0] // store sp in preempted tasks TCB ldr x0, [x1] // get new task stack pointer mov sp, x0 + update_tidr x2 /* backup thread self */ 
mov x19, x2 @@ -119,6 +127,7 @@ rt_hw_context_switch_interrupt: /* setup SP to to-thread's */ ldr x0, [TO_SPP] mov sp, x0 + update_tidr TO_TCB mov x0, TO_TCB bl rt_cpus_lock_status_restore diff --git a/src/scheduler_mp.c b/src/scheduler_mp.c index 7a7bd61e00..6a228e6b53 100644 --- a/src/scheduler_mp.c +++ b/src/scheduler_mp.c @@ -1069,9 +1069,6 @@ void rt_sched_post_ctx_switch(struct rt_thread *thread) } /* safe to access since irq is masked out */ pcpu->current_thread = thread; -#ifdef ARCH_USING_HW_THREAD_SELF - rt_hw_thread_set_self(thread); -#endif /* ARCH_USING_HW_THREAD_SELF */ } #ifdef RT_DEBUGING_CRITICAL