/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-05-18     GuEe-GUI     first version
 */

#include <rthw.h>
#include <rtatomic.h>

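/*
 * Atomic load: a plain 64-bit load followed by "dmb ish", which orders the
 * load before any subsequent memory access on this CPU.
 */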
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t ret;

    __asm__ volatile (
        "   ldr     %0, %1\n"
        "   dmb     ish"
        : "=r" (ret)
        : "Q" (*ptr)
        : "memory");

    return ret;
}

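/*
 * Atomic store: a plain 64-bit store followed by "dmb ish", which orders the
 * store before any subsequent memory access on this CPU.
 */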
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    __asm__ volatile (
        "   str     %1, %0\n"
        "   dmb     ish"
        : "=Q" (*ptr)
        : "r" (val)
        : "memory");
}

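/*
 * Generates rt_hw_atomic_<op>(): an LDXR/STLXR exclusive-access loop that
 * re-reads the value and retries the store ("cbnz %w2, 1b") whenever
 * exclusivity is lost to another observer. The value *ptr held before the
 * operation is returned, and "dmb ish" orders the update against later
 * accesses.
 */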
#define AARCH64_ATOMIC_OP_RETURN(op, ins, constraint)                        \
rt_atomic_t rt_hw_atomic_##op(volatile rt_atomic_t *ptr, rt_atomic_t in_val) \
{                                                                            \
    rt_atomic_t tmp, val, result;                                            \
                                                                             \
    __asm__ volatile (                                                       \
        "   prfm    pstl1strm, %3\n"                                         \
        "1: ldxr    %0, %3\n"                                                \
        "   "#ins"  %1, %0, %4\n"                                            \
        "   stlxr   %w2, %1, %3\n"                                           \
        "   cbnz    %w2, 1b\n"                                               \
        "   dmb     ish"                                                     \
        : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Q" (*ptr)              \
        : __RT_STRINGIFY(constraint) "r" (in_val)                            \
        : "memory");                                                         \
                                                                             \
    return result;                                                           \
}

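/*
 * rt_hw_atomic_add/sub/and/or/xor; each returns the value held before the
 * operation. The third macro argument is the GCC machine constraint (I, J
 * or K) that, in addition to a register, accepts the immediate forms suited
 * to that instruction.
 */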
AARCH64_ATOMIC_OP_RETURN(add, add, I)
AARCH64_ATOMIC_OP_RETURN(sub, sub, J)
AARCH64_ATOMIC_OP_RETURN(and, and, K)
AARCH64_ATOMIC_OP_RETURN(or, orr, K)
AARCH64_ATOMIC_OP_RETURN(xor, eor, K)

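/*
 * Atomically replaces *ptr with val using the same exclusive-access retry
 * loop, and returns the previous contents.
 */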
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t ret, tmp;

    __asm__ volatile (
        "   prfm    pstl1strm, %2\n"
        "1: ldxr    %0, %2\n"
        "   stlxr   %w1, %3, %2\n"
        "   cbnz    %w1, 1b\n"
        "   dmb     ish"
        : "=&r" (ret), "=&r" (tmp), "+Q" (*ptr)
        : "r" (val)
        : "memory");

    return ret;
}

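/* Clears the flag by atomically AND-ing it with 0. */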
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    rt_hw_atomic_and(ptr, 0);
}

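/*
 * Atomically sets the flag and returns its previous value; a non-zero
 * return means the flag was already set.
 */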
rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    return rt_hw_atomic_or(ptr, 1);
}

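/*
 * Compares *ptr with *old: on a match, *ptr is set to new and 1 is
 * returned; otherwise the value actually found is written back through
 * old and 0 is returned.
 */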
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t tmp, oldval;
    __asm__ volatile (
        "   prfm    pstl1strm, %2\n"
        "1: ldxr    %0, %2\n"
        "   eor     %1, %0, %3\n"
        "   cbnz    %1, 2f\n"
        "   stlxr   %w1, %4, %2\n"
        "   cbnz    %w1, 1b\n"
        "   dmb     ish\n"
        "   mov     %w1, #1\n"
        "   b       3f\n"
        "2: str     %0, [%5]\n"
        "   mov     %w1, #0\n"
        "3:"
        : "=&r" (oldval), "=&r" (tmp), "+Q" (*ptr)
        : "Kr" (*old), "r" (new), "r" (old)
        : "memory");

    return tmp;
}