diff --git a/ArmPkg/Include/Library/ArmLib.h b/ArmPkg/Include/Library/ArmLib.h
index 898b195d65..32886aaf49 100644
--- a/ArmPkg/Include/Library/ArmLib.h
+++ b/ArmPkg/Include/Library/ArmLib.h
@@ -190,6 +190,12 @@ ArmCleanDataCache (
   VOID
   );
 
+VOID
+EFIAPI
+ArmCleanDataCacheToPoU (
+  VOID
+  );
+
 VOID
 EFIAPI
 ArmInvalidateInstructionCache (
diff --git a/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c b/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
index b339c50d25..0dd453003c 100644
--- a/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
+++ b/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
@@ -68,8 +68,8 @@ InvalidateInstructionCacheRange (
   IN      UINTN                     Length
   )
 {
-  CacheRangeOperation(Address, Length, ArmCleanDataCache, ArmCleanDataCacheEntryByMVA);
-  ArmInvalidateInstructionCache();
+  CacheRangeOperation (Address, Length, ArmCleanDataCacheToPoU, ArmCleanDataCacheEntryByMVA);
+  ArmInvalidateInstructionCache ();
   return Address;
 }
 
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
index 1ffdb7f83f..d9cf8826ce 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
@@ -196,9 +196,8 @@ ArmV7DataCacheOperation (
   UINTN     SavedInterruptState;
 
   SavedInterruptState = ArmGetInterruptState ();
-  ArmDisableInterrupts();
+  ArmDisableInterrupts ();
 
-
   ArmV7AllDataCachesOperation (DataCacheOperation);
 
   ArmDrainWriteBuffer ();
@@ -208,6 +207,26 @@
   }
 }
 
+
+VOID
+ArmV7PoUDataCacheOperation (
+  IN  ARM_V7_CACHE_OPERATION  DataCacheOperation
+  )
+{
+  UINTN     SavedInterruptState;
+
+  SavedInterruptState = ArmGetInterruptState ();
+  ArmDisableInterrupts ();
+
+  ArmV7PerformPoUDataCacheOperation (DataCacheOperation);
+
+  ArmDrainWriteBuffer ();
+
+  if (SavedInterruptState) {
+    ArmEnableInterrupts ();
+  }
+}
+
 VOID
 EFIAPI
 ArmInvalidateDataCache (
@@ -235,3 +254,11 @@ ArmCleanDataCache (
   ArmV7DataCacheOperation (ArmCleanDataCacheEntryBySetWay);
 }
 
+VOID
+EFIAPI
+ArmCleanDataCacheToPoU (
+  VOID
+  )
+{
+  ArmV7PoUDataCacheOperation (ArmCleanDataCacheEntryBySetWay);
+}
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
index b98407cfbc..66995c7bb4 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
@@ -15,6 +15,7 @@
 
 #ifndef __ARM_V7_LIB_H__
 #define __ARM_V7_LIB_H__
+typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
 
 VOID
 EFIAPI
@@ -34,6 +35,12 @@ ArmCleanDataCacheEntryBySetWay (
   IN  UINT32  SetWayFormat
   );
 
+VOID
+EFIAPI
+ArmCleanDataCacheToPoUEntryBySetWay (
+  IN  UINT32  SetWayFormat
+  );
+
 VOID
 EFIAPI
 ArmCleanInvalidateDataCacheEntryBySetWay (
@@ -76,5 +83,15 @@ ArmDisableFiq (
   VOID
   );
 
+VOID
+ArmV7PerformPoUDataCacheOperation (
+  IN  ARM_V7_CACHE_OPERATION  DataCacheOperation
+  );
+
+VOID
+ArmV7AllDataCachesOperation (
+  IN  ARM_V7_CACHE_OPERATION  DataCacheOperation
+  );
+
 #endif // __ARM_V7_LIB_H__
 
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
index fb1ca2dee2..3bb601541d 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
@@ -38,6 +38,7 @@ GCC_ASM_EXPORT (ArmDisableBranchPrediction)
 GCC_ASM_EXPORT (ArmSetLowVectors)
 GCC_ASM_EXPORT (ArmSetHighVectors)
 GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)
+GCC_ASM_EXPORT (ArmV7PerformPoUDataCacheOperation)
 GCC_ASM_EXPORT (ArmDataMemoryBarrier)
 GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)
 GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
@@ -267,6 +268,55 @@ L_Finished:
   ldmfd SP!, {r4-r12, lr}
   bx    LR
 
+ASM_PFX(ArmV7PerformPoUDataCacheOperation):
+  stmfd SP!,{r4-r12, LR}
+  mov   R1, R0                  @ Save Function call in R1
+  mrc   p15, 1, R6, c0, c0, 1   @ Read CLIDR
+  ands  R3, R6, #0x38000000     @ Mask out all but Level of Unification (LoU)
+  mov   R3, R3, LSR #26         @ Cache level value (naturally aligned)
+  beq   Finished2
+  mov   R10, #0
+
+Loop4:
+  add   R2, R10, R10, LSR #1    @ Work out 3xcachelevel
+  mov   R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
+  and   R12, R12, #7            @ get those 3 bits alone
+  cmp   R12, #2
+  blt   Skip2                   @ no cache or only instruction cache at this level
+  mcr   p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+  isb                           @ isb to sync the change to the CacheSizeID reg
+  mrc   p15, 1, R12, c0, c0, 0  @ reads current Cache Size ID register (CCSIDR)
+  and   R2, R12, #0x7           @ extract the line length field
+  add   R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
+  ldr   R4, =0x3FF
+  ands  R4, R4, R12, LSR #3     @ R4 is the max number on the way size (right aligned)
+  clz   R5, R4                  @ R5 is the bit position of the way size increment
+  ldr   R7, =0x00007FFF
+  ands  R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)
+
+Loop5:
+  mov   R9, R4                  @ R9 working copy of the max way size (right aligned)
+
+Loop6:
+  orr   R0, R10, R9, LSL R5     @ factor in the way number and cache number into R11
+  orr   R0, R0, R7, LSL R2      @ factor in the index number
+
+  blx   R1
+
+  subs  R9, R9, #1              @ decrement the way number
+  bge   Loop6
+  subs  R7, R7, #1              @ decrement the index
+  bge   Loop5
+Skip2:
+  add   R10, R10, #2            @ increment the cache number
+  cmp   R3, R10
+  bgt   Loop4
+
+Finished2:
+  dsb
+  ldmfd SP!, {r4-r12, lr}
+  bx    LR
+
 ASM_PFX(ArmDataMemoryBarrier):
   dmb
   bx    LR
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
index fc28b0d922..28a4564aca 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
@@ -35,6 +35,7 @@
     EXPORT  ArmSetLowVectors
     EXPORT  ArmSetHighVectors
     EXPORT  ArmV7AllDataCachesOperation
+    EXPORT  ArmV7PerformPoUDataCacheOperation
     EXPORT  ArmDataMemoryBarrier
     EXPORT  ArmDataSyncronizationBarrier
     EXPORT  ArmInstructionSynchronizationBarrier
@@ -261,6 +262,55 @@ Finished
   ldmfd SP!, {r4-r12, lr}
   bx    LR
 
+ArmV7PerformPoUDataCacheOperation
+  stmfd SP!,{r4-r12, LR}
+  mov   R1, R0                  ; Save Function call in R1
+  mrc   p15, 1, R6, c0, c0, 1   ; Read CLIDR
+  ands  R3, R6, #&38000000      ; Mask out all but Level of Unification (LoU)
+  mov   R3, R3, LSR #26         ; Cache level value (naturally aligned)
+  beq   Finished2
+  mov   R10, #0
+
+Loop4
+  add   R2, R10, R10, LSR #1    ; Work out 3xcachelevel
+  mov   R12, R6, LSR R2         ; bottom 3 bits are the Cache type for this level
+  and   R12, R12, #7            ; get those 3 bits alone
+  cmp   R12, #2
+  blt   Skip2                   ; no cache or only instruction cache at this level
+  mcr   p15, 2, R10, c0, c0, 0  ; write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+  isb                           ; isb to sync the change to the CacheSizeID reg
+  mrc   p15, 1, R12, c0, c0, 0  ; reads current Cache Size ID register (CCSIDR)
+  and   R2, R12, #&7            ; extract the line length field
+  add   R2, R2, #4              ; add 4 for the line length offset (log2 16 bytes)
+  ldr   R4, =0x3FF
+  ands  R4, R4, R12, LSR #3     ; R4 is the max number on the way size (right aligned)
+  clz   R5, R4                  ; R5 is the bit position of the way size increment
+  ldr   R7, =0x00007FFF
+  ands  R7, R7, R12, LSR #13    ; R7 is the max number of the index size (right aligned)
+
+Loop5
+  mov   R9, R4                  ; R9 working copy of the max way size (right aligned)
+
+Loop6
+  orr   R0, R10, R9, LSL R5     ; factor in the way number and cache number into R11
+  orr   R0, R0, R7, LSL R2      ; factor in the index number
+
+  blx   R1
+
+  subs  R9, R9, #1              ; decrement the way number
+  bge   Loop6
+  subs  R7, R7, #1              ; decrement the index
+  bge   Loop5
+Skip2
+  add   R10, R10, #2            ; increment the cache number
+  cmp   R3, R10
+  bgt   Loop4
+
+Finished2
+  dsb
+  ldmfd SP!, {r4-r12, lr}
+  bx    LR
+
 ArmDataMemoryBarrier
   dmb
   bx    LR
diff --git a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
index 171d261c2a..25a043d03b 100644
--- a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
+++ b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
@@ -56,8 +56,6 @@
 #define CACHE_ARCHITECTURE_UNIFIED     (0UL)
 #define CACHE_ARCHITECTURE_SEPARATE    (1UL)
 
-typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
-
 VOID
 CPSRMaskInsert (
   IN  UINT32    Mask,
@@ -69,22 +67,14 @@ CPSRRead (
   VOID
   );
 
-
 UINT32
 ReadCCSIDR (
   IN UINT32 CSSELR
   );
 
-
 UINT32
 ReadCLIDR (
   VOID
   );
 
-
-VOID
-ArmV7AllDataCachesOperation (
-  IN  ARM_V7_CACHE_OPERATION  DataCacheOperation
-  );
-
 #endif // __ARM_LIB_PRIVATE_H__
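
Review note, not part of the patch: the C sketch below is an illustrative rendering of the set/way walk that ArmV7PerformPoUDataCacheOperation performs in assembly above. The names WalkDataCachesToPoU and CACHE_SET_WAY_OP and the ReadCcsidr/Operation parameters are hypothetical; the field layouts (LoU in CLIDR[29:27], line length and associativity in CCSIDR) follow the assembly's own comments, and __builtin_clz is a GCC/Clang builtin standing in for the clz instruction.

#include <stdint.h>

typedef void (*CACHE_SET_WAY_OP)(uint32_t SetWayFormat);

static void
WalkDataCachesToPoU (
  uint32_t          Clidr,                  /* value read from CLIDR */
  uint32_t        (*ReadCcsidr)(uint32_t),  /* reads CCSIDR for a given CSSELR */
  CACHE_SET_WAY_OP  Operation               /* per-line operation, e.g. clean by set/way */
  )
{
  /* CLIDR[29:27] is the Level of Unification; the assembly keeps 2*LoU in R3. */
  uint32_t LoU = (Clidr >> 27) & 0x7;

  for (uint32_t Level = 0; Level < LoU; Level++) {
    /* CLIDR[3*Level+2 : 3*Level] is the cache type for this level. */
    uint32_t CacheType = (Clidr >> (3 * Level)) & 0x7;
    if (CacheType < 2) {
      continue;                             /* no cache, or instruction cache only */
    }

    uint32_t Csselr = Level << 1;           /* data/unified cache selector (R10) */
    uint32_t Ccsidr = ReadCcsidr (Csselr);

    uint32_t SetShift = (Ccsidr & 0x7) + 4;       /* log2(line length in bytes), R2 */
    uint32_t MaxWay   = (Ccsidr >> 3) & 0x3FF;    /* associativity - 1, R4 */
    uint32_t MaxSet   = (Ccsidr >> 13) & 0x7FFF;  /* number of sets - 1, R7 */
    uint32_t WayShift = (MaxWay == 0) ? 0 : (uint32_t)__builtin_clz (MaxWay);  /* clz R5, R4 */

    for (uint32_t Set = 0; Set <= MaxSet; Set++) {      /* Loop5 */
      for (uint32_t Way = 0; Way <= MaxWay; Way++) {    /* Loop6 */
        /* Same operand layout the assembly builds in R0: level | way | set. */
        Operation (Csselr | (Way << WayShift) | (Set << SetShift));
      }
    }
  }
}

On ARMv7 the Point of Unification is where instruction fetches and data accesses of the core see the same copy of memory, so cleaning the data cache only to PoU is enough before invalidating the instruction cache; that appears to be the motivation for switching InvalidateInstructionCacheRange from ArmCleanDataCache to ArmCleanDataCacheToPoU in this patch.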