ArmPkg/ArmExceptionLib: fold exception handler prologue into vector table
Unlike the AArch32 vector table, which only has room for a single instruction per exception type, the AArch64 vector table has 128-byte slots, which can easily hold the shared prologues that were previously emitted out of line. So refactor this code into a single macro, and expand it into each vector table slot. Since the address of the common handler entry point is no longer patched in by the C code, we can just emit the literal into each vector entry directly.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Leif Lindholm <leif.lindholm@linaro.org>
Reviewed-by: Eugene Cohen <eugene@hp.com>
This commit is contained in:
parent a34608ca96
commit 0dbbaa5573
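Before the diff itself, here is a minimal self-contained sketch of the idea the commit relies on (illustrative only; the names below are made up and are not the edk2 VECTOR_ENTRY/VECTOR_END macros): each slot in the AArch64 vector table is 128 bytes, so the whole prologue, including the literal word holding the common handler's address, fits inside the slot instead of being a single branch to an out-of-line stub.

        // Illustrative sketch: a 2 KB-aligned vector table whose 128-byte
        // slots each hold a macro-expanded prologue plus its literal pool.
        .macro  vector_slot, base, offset, exception_type
        .org    \base + \offset              // move to the start of this slot
        mov     x0, #\exception_type         // tag the exception for the handler
        ldr     x1, =common_handler          // address comes from a literal word
        br      x1
        .ltorg                               // emit that literal inside the slot
        .endm

        .text
        .balign 2048                         // VBAR_ELx requires 2 KB alignment
vector_table_base:
        vector_slot vector_table_base, 0x000, 0   // Current EL, SP0, Synchronous
        vector_slot vector_table_base, 0x080, 1   // Current EL, SP0, IRQ
        // ... fourteen more slots at 0x100, 0x180, ..., 0x780 ...

common_handler:
        ret                                  // stand-in for the real handler

Each expansion uses roughly 24 of the 128 available bytes, which is why the shared prologue no longer needs to live out of line.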
@@ -99,8 +99,6 @@
 */

 GCC_ASM_EXPORT(ExceptionHandlersEnd)
-GCC_ASM_EXPORT(CommonExceptionEntry)
-GCC_ASM_EXPORT(AsmCommonExceptionEntry)
 GCC_ASM_EXPORT(CommonCExceptionHandler)

 .text
@@ -172,90 +170,12 @@ ASM_PFX(ExceptionHandlersStart):
 VECTOR_BASE(ExceptionHandlersStart)
 #endif

-//
-// Current EL with SP0 : 0x0 - 0x180
-//
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SYNC)
-ASM_PFX(SynchronousExceptionSP0):
-  b   ASM_PFX(SynchronousExceptionEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_IRQ)
-ASM_PFX(IrqSP0):
-  b   ASM_PFX(IrqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_FIQ)
-ASM_PFX(FiqSP0):
-  b   ASM_PFX(FiqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SERR)
-ASM_PFX(SErrorSP0):
-  b   ASM_PFX(SErrorEntry)
-
-//
-// Current EL with SPx: 0x200 - 0x380
-//
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SYNC)
-ASM_PFX(SynchronousExceptionSPx):
-  b   ASM_PFX(SynchronousExceptionEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_IRQ)
-ASM_PFX(IrqSPx):
-  b   ASM_PFX(IrqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_FIQ)
-ASM_PFX(FiqSPx):
-  b   ASM_PFX(FiqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SERR)
-ASM_PFX(SErrorSPx):
-  b   ASM_PFX(SErrorEntry)
-
-//
-// Lower EL using AArch64 : 0x400 - 0x580
-//
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SYNC)
-ASM_PFX(SynchronousExceptionA64):
-  b   ASM_PFX(SynchronousExceptionEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_IRQ)
-ASM_PFX(IrqA64):
-  b   ASM_PFX(IrqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_FIQ)
-ASM_PFX(FiqA64):
-  b   ASM_PFX(FiqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SERR)
-ASM_PFX(SErrorA64):
-  b   ASM_PFX(SErrorEntry)
-
-//
-// Lower EL using AArch32 : 0x600 - 0x780
-//
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SYNC)
-ASM_PFX(SynchronousExceptionA32):
-  b   ASM_PFX(SynchronousExceptionEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_IRQ)
-ASM_PFX(IrqA32):
-  b   ASM_PFX(IrqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_FIQ)
-ASM_PFX(FiqA32):
-  b   ASM_PFX(FiqEntry)
-
-VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SERR)
-ASM_PFX(SErrorA32):
-  b   ASM_PFX(SErrorEntry)
-
-VECTOR_END(ExceptionHandlersStart)
-
 #undef REG_PAIR
 #undef REG_ONE
 #define REG_PAIR(REG1, REG2, OFFSET, CONTEXT_SIZE) stp REG1, REG2, [sp, #(OFFSET-CONTEXT_SIZE)]
 #define REG_ONE(REG1, OFFSET, CONTEXT_SIZE) stur REG1, [sp, #(OFFSET-CONTEXT_SIZE)]

-ASM_PFX(SynchronousExceptionEntry):
+.macro ExceptionEntry, val
   // Move the stackpointer so we can reach our structure with the str instruction.
   sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)

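An aside on the GAS syntax the hunk above ends with: `.macro ExceptionEntry, val` declares a macro parameter that is referenced as `\val` and substituted textually at every expansion site, which is what lets each vector slot record its own EXCEPT_AARCH64_* constant. A tiny hypothetical example of the same mechanism:

        // Hypothetical mini-macro illustrating the \val substitution.
        .macro  tag_exception, val
        mov     x0, #\val            // becomes mov x0, #<constant> when expanded
        .endm

        tag_exception 0              // expands to: mov x0, #0
        tag_exception 5              // expands to: mov x0, #5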
@@ -264,50 +184,96 @@ ASM_PFX(SynchronousExceptionEntry):
   ALL_GP_REGS

   // Record the type of exception that occurred.
-  mov       x0, #EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
+  mov       x0, #\val

   // Jump to our general handler to deal with all the common parts and process the exception.
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
+  ldr       x1, =ASM_PFX(CommonExceptionEntry)
   br        x1

-ASM_PFX(IrqEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_IRQ
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-ASM_PFX(FiqEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_FIQ
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-ASM_PFX(SErrorEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_SERROR
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
+  .ltorg
+  .endm


 //
-// This gets patched by the C code that patches in the vector table
+// Current EL with SP0 : 0x0 - 0x180
 //
-.align 3
-ASM_PFX(CommonExceptionEntry):
-  .8byte ASM_PFX(AsmCommonExceptionEntry)
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SYNC)
+ASM_PFX(SynchronousExceptionSP0):
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS

+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_IRQ)
+ASM_PFX(IrqSP0):
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_FIQ)
+ASM_PFX(FiqSP0):
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SERR)
+ASM_PFX(SErrorSP0):
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
+
+//
+// Current EL with SPx: 0x200 - 0x380
+//
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SYNC)
+ASM_PFX(SynchronousExceptionSPx):
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_IRQ)
+ASM_PFX(IrqSPx):
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_FIQ)
+ASM_PFX(FiqSPx):
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SERR)
+ASM_PFX(SErrorSPx):
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
+
+//
+// Lower EL using AArch64 : 0x400 - 0x580
+//
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SYNC)
+ASM_PFX(SynchronousExceptionA64):
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_IRQ)
+ASM_PFX(IrqA64):
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_FIQ)
+ASM_PFX(FiqA64):
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SERR)
+ASM_PFX(SErrorA64):
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
+
+//
+// Lower EL using AArch32 : 0x600 - 0x780
+//
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SYNC)
+ASM_PFX(SynchronousExceptionA32):
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_IRQ)
+ASM_PFX(IrqA32):
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_FIQ)
+ASM_PFX(FiqA32):
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
+
+VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SERR)
+ASM_PFX(SErrorA32):
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
+
+VECTOR_END(ExceptionHandlersStart)

 ASM_PFX(ExceptionHandlersEnd):


-//
-// This code runs from CpuDxe driver loaded address. It is patched into
-// CommonExceptionEntry.
-//
-ASM_PFX(AsmCommonExceptionEntry):
+ASM_PFX(CommonExceptionEntry):
 /* NOTE:
    We have to break up the save code because the immediate value to be used
    with the SP is too big to do it all in one step so we need to shuffle the SP
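Two mechanics in this last hunk deserve a note. First, `ldr x1, =ASM_PFX(CommonExceptionEntry)` asks the assembler to place the 64-bit address in a literal pool and load it PC-relative, and the `.ltorg` added to the macro forces that pool to be emitted right there, inside the current 128-byte slot; this replaces the old scheme of a single `.8byte` pointer patched in by the C code. Second, the NOTE kept at the end refers to an architectural limit: AArch64 ADD/SUB immediates are only 12 bits wide (optionally shifted left by 12), so a context frame larger than 4095 bytes cannot be carved out of the stack in one instruction. A small sketch of both points, with made-up sizes and labels:

        // sub  sp, sp, #0x1100          // rejected by the assembler: 0x1100 does not
                                         // fit the 12-bit (optionally LSL #12) field
        sub     sp, sp, #0x1000          // so a large frame is carved out in
        sub     sp, sp, #0x100           // two encodable steps instead

        ldr     x1, =common_handler_stub // PC-relative load from a literal word...
        br      x1
        .ltorg                           // ...which is emitted right here, unexecuted

common_handler_stub:
        ret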