/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Date           Author       Notes
 * 2020-01-15     bigmagic     the first version
 * 2020-08-10     SummerGift   support clang compiler
 * 2021-11-04     GuEe-GUI     set sp with SP_ELx
 * 2021-12-28     GuEe-GUI     add smp support
 */
|
|
|
|
|
2022-01-07 13:49:06 +08:00
|
|
|
#include "rtconfig.h"
|
2021-09-22 17:57:45 +08:00
|
|
|
.section ".text.entrypoint","ax"
|
|
|
|
|
2022-01-07 13:49:06 +08:00
|
|
|
#define SECONDARY_STACK_SIZE 4096
|
|
|
|
|
2021-09-22 17:57:45 +08:00
|
|
|
.globl _start
|
2022-01-07 13:49:06 +08:00
|
|
|
.globl secondary_cpu_start
|
2021-09-22 17:57:45 +08:00
|
|
|
_start:
|
|
|
|
mrs x1, mpidr_el1
|
2022-01-07 13:49:06 +08:00
|
|
|
and x1, x1, #0xff
|
|
|
|
cbnz x1, cpu_idle /* If cpu id > 0, stop slave cores */
|
|
|
|
|
|
|
|
secondary_cpu_start:
|
|
|
|
#ifdef RT_USING_SMP
|
|
|
|
/* Read cpu mpidr_el1 */
|
|
|
|
mrs x1, mpidr_el1
|
|
|
|
|
|
|
|
/* Read cpu id */
|
|
|
|
ldr x0, =rt_cpu_mpidr_early /* BSP must be defined `rt_cpu_mpidr_early' table in smp */
|
|
|
|
mov x2, #0
|
|
|
|
|
|
|
|
cpu_id_confirm:
|
|
|
|
add x2, x2, #1 /* Next cpu id inc */
|
|
|
|
ldr x3, [x0], #8
|
|
|
|
cmp x3, #0
|
|
|
|
beq cpu_idle /* Mean that `rt_cpu_mpidr_early' table is end */
|
|
|
|
cmp x3, x1
|
|
|
|
bne cpu_id_confirm
|
|
|
|
|
|
|
|
/* Get cpu id success */
|
|
|
|
sub x0, x2, #1
|
|
|
|
msr tpidr_el1, x0 /* Save cpu id global */
|
|
|
|
cbz x0, cpu_setup /* Only go to cpu_setup when cpu id = 0 */
|
|
|
|
|
|
|
|
/* Set current cpu's stack top */
|
|
|
|
sub x0, x0, #1
|
|
|
|
mov x1, #SECONDARY_STACK_SIZE
|
|
|
|
adr x2, .secondary_cpu_stack_top
|
|
|
|
msub x1, x0, x1, x2
|
|
|
|
|
|
|
|
b cpu_check_el
|
|
|
|
#else
|
|
|
|
msr tpidr_el1, xzr
|
|
|
|
b cpu_setup
|
|
|
|
#endif /* RT_USING_SMP */
|
2021-09-22 17:57:45 +08:00
|
|
|
|
|
|
|
cpu_idle:
|
|
|
|
wfe
|
|
|
|
b cpu_idle
|
|
|
|
|
|
|
|
cpu_setup:
|
|
|
|
ldr x1, =_start
|
|
|
|
|
2022-01-07 13:49:06 +08:00
|
|
|
cpu_check_el:
|
2021-09-22 17:57:45 +08:00
|
|
|
mrs x0, CurrentEL /* CurrentEL Register. bit 2, 3. Others reserved */
|
|
|
|
and x0, x0, #12 /* Clear reserved bits */
|
|
|
|
|
|
|
|
/* Running at EL3? */
|
|
|
|
cmp x0, #12 /* EL3 value is 0b1100 */
|
|
|
|
bne cpu_not_in_el3
|
|
|
|
|
|
|
|
/* Should never be executed, just for completeness. (EL3) */
|
2021-11-30 10:32:23 +08:00
|
|
|
mov x2, #(1 << 0) /* EL0 and EL1 are in Non-Secure state */
|
|
|
|
orr x2, x2, #(1 << 4) /* RES1 */
|
|
|
|
orr x2, x2, #(1 << 5) /* RES1 */
|
|
|
|
orr x2, x2, #(1 << 7) /* SMC instructions are undefined at EL1 and above */
|
|
|
|
orr x2, x2, #(1 << 8) /* HVC instructions are enabled at EL1 and above */
|
|
|
|
orr x2, x2, #(1 << 10) /* The next lower level is AArch64 */
|
2021-09-22 17:57:45 +08:00
|
|
|
msr scr_el3, x2
|
|
|
|
|
2022-03-07 22:41:56 +08:00
|
|
|
mov x2, #9 /* Next level is 0b1001->EL2h */
|
|
|
|
orr x2, x2, #(1 << 6) /* Mask FIQ */
|
|
|
|
orr x2, x2, #(1 << 7) /* Mask IRQ */
|
|
|
|
orr x2, x2, #(1 << 8) /* Mask SError */
|
|
|
|
orr x2, x2, #(1 << 9) /* Mask Debug Exception */
|
|
|
|
msr spsr_el3, x2
|
|
|
|
adr x2, cpu_in_el2
|
2021-09-22 17:57:45 +08:00
|
|
|
msr elr_el3, x2
|
2022-03-07 22:41:56 +08:00
|
|
|
eret
|
2021-09-22 17:57:45 +08:00
|
|
|
|
|
|
|
cpu_not_in_el3: /* Running at EL2 or EL1 */
|
|
|
|
cmp x0, #4 /* EL1 = 0100 */
|
2022-03-07 22:41:56 +08:00
|
|
|
beq cpu_in_el1
|
2021-09-22 17:57:45 +08:00
|
|
|
|
|
|
|
cpu_in_el2:
|
|
|
|
/* Enable CNTP for EL1 */
|
|
|
|
mrs x0, cnthctl_el2 /* Counter-timer Hypervisor Control register */
|
|
|
|
orr x0, x0, #3
|
|
|
|
msr cnthctl_el2, x0
|
|
|
|
msr cntvoff_el2, xzr
|
|
|
|
|
|
|
|
mov x0, #(1 << 31) /* Enable AArch64 in EL1 */
|
|
|
|
orr x0, x0, #(1 << 1) /* SWIO hardwired on Pi3 */
|
|
|
|
msr hcr_el2, x0
|
|
|
|
|
2022-03-07 22:41:56 +08:00
|
|
|
mov x2, #5 /* Next level is 0b0101->EL1h */
|
|
|
|
orr x2, x2, #(1 << 6) /* Mask FIQ */
|
|
|
|
orr x2, x2, #(1 << 7) /* Mask IRQ */
|
|
|
|
orr x2, x2, #(1 << 8) /* Mask SError */
|
|
|
|
orr x2, x2, #(1 << 9) /* Mask Debug Exception */
|
|
|
|
msr spsr_el2, x2
|
2021-09-22 17:57:45 +08:00
|
|
|
adr x2, cpu_in_el1
|
|
|
|
msr elr_el2, x2
|
|
|
|
eret
|
|
|
|
|
|
|
|
cpu_in_el1:
|
2021-11-30 10:32:23 +08:00
|
|
|
msr spsel, #1
|
2021-09-22 17:57:45 +08:00
|
|
|
mov sp, x1 /* Set sp in el1 */
|
|
|
|
|
|
|
|
/* Avoid trap from SIMD or float point instruction */
|
|
|
|
mov x1, #0x00300000 /* Don't trap any SIMD/FP instructions in both EL0 and EL1 */
|
|
|
|
msr cpacr_el1, x1
|
|
|
|
|
|
|
|
mrs x1, sctlr_el1
|
|
|
|
orr x1, x1, #(1 << 12) /* Enable Instruction */
|
|
|
|
bic x1, x1, #(3 << 3) /* Disable SP Alignment check */
|
|
|
|
bic x1, x1, #(1 << 1) /* Disable Alignment check */
|
|
|
|
msr sctlr_el1, x1
|
|
|
|
|
2022-01-07 13:49:06 +08:00
|
|
|
#ifdef RT_USING_SMP
|
|
|
|
ldr x1, =_start
|
|
|
|
cmp sp, x1
|
|
|
|
bne secondary_cpu_c_start
|
|
|
|
#endif /* RT_USING_SMP */
|
|
|
|
|
|
|
|
ldr x0, =__bss_start
|
|
|
|
ldr x1, =__bss_end
|
|
|
|
sub x2, x1, x0
|
|
|
|
mov x3, x1
|
|
|
|
cmp x2, #7
|
|
|
|
bls clean_bss_check
|
|
|
|
|
|
|
|
clean_bss_loop_quad:
|
|
|
|
str xzr, [x0], #8
|
|
|
|
sub x2, x3, x0
|
|
|
|
cmp x2, #7
|
|
|
|
bhi clean_bss_loop_quad
|
|
|
|
cmp x1, x0
|
|
|
|
bls jump_to_entry
|
|
|
|
|
|
|
|
clean_bss_loop_byte:
|
|
|
|
str xzr, [x0], #1
|
|
|
|
|
|
|
|
clean_bss_check:
|
|
|
|
cmp x1, x0
|
|
|
|
bhi clean_bss_loop_byte
|
2021-09-22 17:57:45 +08:00
|
|
|
|
|
|
|
jump_to_entry:
|
|
|
|
b rtthread_startup
|
|
|
|
b cpu_idle /* For failsafe, halt this core too */
|
2022-01-07 13:49:06 +08:00
|
|
|
|
|
|
|
#ifdef RT_USING_SMP
|
|
|
|
.align 12
|
|
|
|
.secondary_cpu_stack:
|
|
|
|
.space (SECONDARY_STACK_SIZE * (RT_CPUS_NR - 1))
|
|
|
|
.secondary_cpu_stack_top:
|
|
|
|
#endif
|