原始版本

This commit is contained in:
冯佳
2025-06-19 21:56:46 +08:00
parent fe98e5f010
commit a4841450cf
4152 changed files with 1910684 additions and 0 deletions

View File

@ -0,0 +1,120 @@
/*
* Copyright (c) 2006-2020, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2021-05-18 Jesven the first version
* 2023-06-24 Shell Support backtrace for user thread
* 2024-01-06 Shell Fix barrier on irq_disable/enable
* 2024-03-28 Shell Move vector handling codes from context_gcc.S
*/
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif
#include "context_gcc.h"
#include "../include/vector_gcc.h"
#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
/**
 * Context switch status
 *
 * Zero-initialized (.bss) state shared between the scheduler side
 * (rt_hw_context_switch_interrupt) and the IRQ exit path
 * (rt_hw_vector_irq_sched / rt_hw_context_switch_interrupt_do).
 */
.section .bss
/* value of the 'from' argument: address where the preempted thread's SP
 * is stored by rt_hw_context_switch_interrupt_do */
rt_interrupt_from_thread:
.quad 0
/* value of the 'to' argument: address holding the SP of the thread to resume */
rt_interrupt_to_thread:
.quad 0
/* 1 while a context switch requested from interrupt context is pending, else 0 */
rt_thread_switch_interrupt_flag:
.quad 0
.section .text
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 *
 * First switch into a thread: no 'from' context is saved.
 * x0 holds the address of the target thread's saved-SP slot.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
clrex                      // clear exclusive monitor: no stale LDXR/STXR reservation survives the switch
ldr x0, [x0]               // x0 = *to : the target thread's saved stack pointer
RESTORE_CONTEXT_SWITCH x0  // set SP and unwind the saved context (see context_gcc.h)
NEVER_RETURN               // macro from asm-generic.h; control never falls through here
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 *
 * Thread-to-thread switch from task context: save the current
 * context, park SP into the 'from' slot, then restore the 'to'
 * context.  NOTE(review): x2 is documented as 'to thread' but is
 * overwritten below; presumably SAVE_CONTEXT_SWITCH consumes it —
 * confirm against the macro definition.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
clrex                         // drop any exclusive-monitor reservation before switching
SAVE_CONTEXT_SWITCH x19, x20  // push the callee-visible context onto the current stack
mov x2, sp
str x2, [x0] // store sp in preempted task's TCB ('from' slot)
ldr x0, [x1] // get new task's stack pointer from the 'to' slot
RESTORE_CONTEXT_SWITCH x0
NEVER_RETURN
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 *
 * Record a context-switch request while in interrupt context; the
 * actual switch is deferred to the IRQ exit path.  If a request is
 * already pending, only the destination is updated so the original
 * 'from' thread is still the one whose context gets saved.
 */
rt_hw_context_switch_interrupt:
ldr x6, =rt_thread_switch_interrupt_flag
ldr x7, [x6]
cmp x7, #1
b.eq _reswitch               // switch already pending: keep original 'from', just retarget 'to'
/* set rt_interrupt_from_thread */
ldr x4, =rt_interrupt_from_thread
str x0, [x4]
/* set rt_thread_switch_interrupt_flag to 1 */
mov x7, #1
str x7, [x6]
stp x1, x30, [sp, #-0x10]!   // x1 (to) and lr must survive the call below
#ifdef RT_USING_SMART
mov x0, x2                   // x2 = from_thread: save its user-mode settings
bl lwp_user_setting_save
#endif
ldp x1, x30, [sp], #0x10
_reswitch:
ldr x6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
str x1, [x6]
ret
.globl rt_hw_context_switch_interrupt_do
/**
 * rt_hw_context_switch_interrupt_do(void)
 *
 * Perform the switch recorded by rt_hw_context_switch_interrupt:
 * save the interrupted thread's remaining context, store its SP
 * into the 'from' slot, then restore the 'to' thread.  Called from
 * the IRQ exit path (rt_hw_vector_irq_sched); does not return.
 */
rt_hw_context_switch_interrupt_do:
clrex                        // clear exclusive monitor before the context changes
SAVE_CONTEXT_SWITCH_FAST     // save the rest of the context on the current stack
ldr x3, =rt_interrupt_from_thread
ldr x4, [x3]                 // x4 = address of the preempted thread's SP slot
mov x0, sp
str x0, [x4] // store sp in preempted task's TCB
ldr x3, =rt_interrupt_to_thread
ldr x4, [x3]
ldr x0, [x4] // get new task's stack pointer
RESTORE_CONTEXT_SWITCH x0
NEVER_RETURN

View File

@ -0,0 +1,83 @@
/*
* Copyright (c) 2006-2024, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2024-03-25 Shell Trimming unnecessary ops and
* improve the performance of ctx switch
*/
#ifndef __ARM64_CONTEXT_H__
#define __ARM64_CONTEXT_H__
#include "../include/context_gcc.h"
#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
/* restore address space
 *
 * When RT_USING_SMART (user-mode support) is enabled, switch the MMU
 * address space and user-mode settings to those of the thread being
 * resumed; a no-op otherwise.
 * Clobbers x0 and x19 (x19 is reloaded afterwards by the surrounding
 * context restore).
 */
.macro RESTORE_ADDRESS_SPACE
#ifdef RT_USING_SMART
bl rt_thread_self            // x0 = current (target) thread
mov x19, x0                  // keep it across the next call
bl lwp_aspace_switch         // switch page tables to the thread's lwp
mov x0, x19
bl lwp_user_setting_restore  // restore per-thread user-mode settings
#endif
.endm
/*
 * Resume a thread from its saved switch frame located at \using_sp:
 * point SP at the frame, switch address space (SMART only), then pop
 * the register context via _RESTORE_CONTEXT_SWITCH (defined in
 * ../include/context_gcc.h).
 */
.macro RESTORE_CONTEXT_SWITCH using_sp
/* Set the SP to point to the stack of the task being restored. */
mov sp, \using_sp
RESTORE_ADDRESS_SPACE
_RESTORE_CONTEXT_SWITCH
.endm
/*
 * Pop a full exception frame (the mirror of SAVE_IRQ_CONTEXT) and
 * return from the exception.  The pop order must exactly match the
 * push order of the save macro — do not reorder.
 *
 * NOTE(review): the TST below sets the Z flag that is consumed by the
 * final BEQ; every instruction in between (LDP, MSR to SPSR/ELR/
 * SP_EL0/FPCR/FPSR) leaves NZCV untouched.  RESTORE_FPU (asm-fpu.h)
 * is presumed flag-preserving as well — confirm if it changes.
 */
.macro RESTORE_IRQ_CONTEXT
#ifdef RT_USING_SMART
BL rt_thread_self            // switch address space for the resumed thread
MOV X19, X0
BL lwp_aspace_switch
MOV X0, X19
BL lwp_user_setting_restore
#endif
LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
TST X3, #0x1f               // SPSR.M[4:0] == 0 (EL0t) => returning to user mode
MSR SPSR_EL1, X3
MSR ELR_EL1, X2
LDP X29, X30, [SP], #0x10   // first slot pair: user SP (via x29) and LR
MSR SP_EL0, X29
LDP X28, X29, [SP], #0x10   // FP control/status were stashed in this pair
MSR FPCR, X28
MSR FPSR, X29
LDP X28, X29, [SP], #0x10   // now the real x28/x29 values
LDP X26, X27, [SP], #0x10
LDP X24, X25, [SP], #0x10
LDP X22, X23, [SP], #0x10
LDP X20, X21, [SP], #0x10
LDP X18, X19, [SP], #0x10
LDP X16, X17, [SP], #0x10
LDP X14, X15, [SP], #0x10
LDP X12, X13, [SP], #0x10
LDP X10, X11, [SP], #0x10
LDP X8, X9, [SP], #0x10
LDP X6, X7, [SP], #0x10
LDP X4, X5, [SP], #0x10
LDP X2, X3, [SP], #0x10
LDP X0, X1, [SP], #0x10
RESTORE_FPU SP
#ifdef RT_USING_SMART
BEQ arch_ret_to_user        // going back to EL0: take the user-return path (Z from TST above)
#endif
ERET
.endm
#endif /* __ARM64_CONTEXT_H__ */

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2006-2020, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2024-03-28 Shell Move vector handling codes from context_gcc.S
*/
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif
#include "../include/vector_gcc.h"
#include "context_gcc.h"
#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
.section .text
/**
 * FIQ entry stub: save the interrupted context, call the C handler,
 * then leave through the common IRQ exit path.
 *
 * Fix: .align/.globl must precede the label.  In the original the
 * label came first, so the exported symbol pointed at the padding
 * emitted by .align rather than at the aligned first instruction.
 */
.align 8                        /* 2^8 = 256-byte alignment for the vector stub */
.globl vector_fiq
vector_fiq:
    SAVE_IRQ_CONTEXT
    bl      rt_hw_trap_fiq      /* C-level FIQ dispatch */
    b       rt_hw_irq_exit      /* common exit: restores context / may reschedule */
.globl rt_thread_switch_interrupt_flag
.globl rt_hw_context_switch_interrupt_do

/**
 * void rt_hw_vector_irq_sched(void *eframe)
 * @brief do IRQ scheduling
 *
 * If rt_thread_switch_interrupt_flag is set, clear it and enter
 * rt_hw_context_switch_interrupt_do, which never returns; otherwise
 * fall through to the common IRQ exit.
 *
 * Fix: .align/.globl moved above the label so the exported symbol
 * points at the aligned first instruction instead of at the padding
 * that .align inserts after an already-placed label.
 */
.align 8
.globl rt_hw_vector_irq_sched
rt_hw_vector_irq_sched:
    /* was a context switch requested from interrupt context? */
    ldr     x1, =rt_thread_switch_interrupt_flag
    ldr     x2, [x1]
    cmp     x2, #1
    bne     1f

    /* consume the request, then perform the deferred switch (no return) */
    mov     x2, #0
    str     x2, [x1]
    bl      rt_hw_context_switch_interrupt_do
1:
    b       rt_hw_irq_exit