feat: arm64: update thread self on sp-switch
This patch tightens context switching so that the stack pointer (sp) and the thread-self register are updated together: the thread-self write now happens in the low-level switch code immediately after the sp switch, instead of later in C. This keeps `rt_hw_thread_self()` consistent with the stack the CPU is actually running on and closes the window in which code running on the new stack could still observe the previous thread.

Changes:
- Modified `cpuport.h` to access thread self through `ARM64_THREAD_REG`.
- Added an `update_tidr` macro in `context_gcc.S` to centralize the thread-self register update.
- Adjusted `rt_hw_context_switch_to`, `rt_hw_context_switch` and the interrupt switch path to call `update_tidr` right after the sp switch.
- Cleaned up `scheduler_mp.c` by removing the now-redundant thread-self assignment.

Signed-off-by: Shell <smokewood@qq.com>
commit 5796e0d646 (parent 906d1caea6)
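For orientation, here is a minimal sketch of what the read side looks like with and without the option. Everything except the identifiers that appear in the diff below (`ARCH_USING_HW_THREAD_SELF`, `rt_hw_thread_self`, `current_thread`) is an assumption about the surrounding RT-Thread code, not part of this patch:

#include <rtthread.h>
#include <rthw.h>

/* Hypothetical helper, illustration only; assumes an SMP (RT_USING_SMP) build.
 * With ARCH_USING_HW_THREAD_SELF the current thread is a single system-register
 * read (mrs xN, tpidr_el1), so the register must always name the thread whose
 * stack sp points into, which is exactly what moving the update next to
 * `mov sp, x0` guarantees. */
static struct rt_thread *current_thread_sketch(void)
{
#ifdef ARCH_USING_HW_THREAD_SELF
    return rt_hw_thread_self();             /* accessor declared in cpuport.h */
#else
    /* fallback: per-CPU bookkeeping kept by the scheduler; assumes the caller
     * has interrupts masked so the CPU cannot migrate under it */
    return rt_cpu_self()->current_thread;
#endif
}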
armv8.h
@@ -11,6 +11,12 @@
 #ifndef __ARMV8_H__
 #define __ARMV8_H__
 
+#include <rtconfig.h>
+
+#ifdef ARCH_USING_HW_THREAD_SELF
+#define ARM64_THREAD_REG tpidr_el1
+#endif /* ARCH_USING_HW_THREAD_SELF */
+
 #ifdef __ASSEMBLY__
 
 /*********************
cpuport.h
@@ -57,14 +57,14 @@ void _thread_start(void);
 rt_inline struct rt_thread *rt_hw_thread_self(void)
 {
     struct rt_thread *thread;
-    __asm__ volatile ("mrs %0, tpidr_el1":"=r"(thread));
+    __asm__ volatile ("mrs %0, " RT_STRINGIFY(ARM64_THREAD_REG) :"=r"(thread));
 
     return thread;
 }
 
 rt_inline void rt_hw_thread_set_self(struct rt_thread *thread)
 {
-    __asm__ volatile ("msr tpidr_el1, %0"::"r"(thread));
+    __asm__ volatile ("msr " RT_STRINGIFY(ARM64_THREAD_REG) ", %0"::"r"(thread));
 }
 
 #endif /* ARCH_USING_HW_THREAD_SELF */
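A quick host-side check of the rewrite above: adjacent string literals are concatenated by the compiler, and `RT_STRINGIFY` (assumed here to do the usual two-step expansion, mimicked by the local `STR` macros) turns `ARM64_THREAD_REG` into `"tpidr_el1"`, so the asm templates are exactly the strings that were hard-coded before:

#include <assert.h>
#include <string.h>

/* stand-ins for the rtconfig.h / rtdef.h definitions; illustrative only */
#define ARM64_THREAD_REG tpidr_el1
#define STR_(x) #x
#define STR(x)  STR_(x)   /* expand the macro first, then stringify */

int main(void)
{
    assert(strcmp("mrs %0, " STR(ARM64_THREAD_REG), "mrs %0, tpidr_el1") == 0);
    assert(strcmp("msr " STR(ARM64_THREAD_REG) ", %0", "msr tpidr_el1, %0") == 0);
    return 0;
}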
context_gcc.S
@@ -27,6 +27,12 @@
 
 .globl rt_hw_context_switch_to
 
+.macro update_tidr, srcx
+#ifdef ARCH_USING_HW_THREAD_SELF
+    msr ARM64_THREAD_REG, \srcx
+#endif /* ARCH_USING_HW_THREAD_SELF */
+.endm
+
 /*
  * void rt_hw_context_switch_to(rt_uint3 to, struct rt_thread *to_thread);
  * X0 --> to (thread stack)
@@ -35,6 +41,7 @@
 rt_hw_context_switch_to:
     ldr x0, [x0]
     mov sp, x0
+    update_tidr x1
 
     /* reserved to_thread */
     mov x19, x1
@@ -62,6 +69,7 @@ rt_hw_context_switch:
     str x3, [x0]    // store sp in preempted tasks TCB
     ldr x0, [x1]    // get new task stack pointer
     mov sp, x0
+    update_tidr x2
 
     /* backup thread self */
     mov x19, x2
@@ -119,6 +127,7 @@ rt_hw_context_switch_interrupt:
     /* setup SP to to-thread's */
     ldr x0, [TO_SPP]
     mov sp, x0
+    update_tidr TO_TCB
 
     mov x0, TO_TCB
     bl rt_cpus_lock_status_restore
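With the `update_tidr` calls sitting directly after each `mov sp, x0`, the thread-self register and the active stack can no longer disagree from the point of view of later C code. A hypothetical debug helper that thread-context code could use to check this invariant; it is not part of the patch, and the `stack_addr`/`stack_size` fields are the usual `struct rt_thread` members rather than anything introduced here:

#include <rtthread.h>

/* Hypothetical sanity check, illustration only: call from thread context.
 * rt_hw_thread_self() is the accessor from cpuport.h shown above. */
void thread_self_sanity_check(void)
{
#ifdef ARCH_USING_HW_THREAD_SELF
    struct rt_thread *self = rt_hw_thread_self();
    rt_ubase_t sp;

    __asm__ volatile ("mov %0, sp" : "=r"(sp));

    /* the running stack pointer should fall inside the stack owned by the
     * thread that ARM64_THREAD_REG (tpidr_el1) currently points to */
    RT_ASSERT(sp >= (rt_ubase_t)self->stack_addr);
    RT_ASSERT(sp <= (rt_ubase_t)self->stack_addr + self->stack_size);
#endif /* ARCH_USING_HW_THREAD_SELF */
}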
scheduler_mp.c
@@ -1069,9 +1069,6 @@ void rt_sched_post_ctx_switch(struct rt_thread *thread)
     }
     /* safe to access since irq is masked out */
     pcpu->current_thread = thread;
-#ifdef ARCH_USING_HW_THREAD_SELF
-    rt_hw_thread_set_self(thread);
-#endif /* ARCH_USING_HW_THREAD_SELF */
 }
 
 #ifdef RT_DEBUGING_CRITICAL