ArmPkg/ArmLib: Revert change 'Fixed field shifting in CLIDR_EL1 (AArch64)'

The shift by 1 to the left was intentional. It eases access to CSSELR and to the set/way
cache-maintenance operations, where the cache level field sits at the BIT1 position.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Olivier Martin <olivier.martin@arm.com>



git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@14704 6f19259b-4bc3-4df7-8a09-765794883524
This commit is contained in:
Olivier Martin 2013-09-23 09:42:05 +00:00 committed by oliviermartin
parent cc271ec311
commit 433a49a094
1 changed file with 4 additions and 2 deletions

View File

@ -320,7 +320,8 @@ ASM_PFX(AArch64AllDataCachesOperation):
mov x1, x0 // Save Function call in x1
mrs x6, clidr_el1 // Read EL1 CLIDR
and x3, x6, #0x7000000 // Mask out all but Level of Coherency (LoC)
lsr x3, x3, #24 // Left align cache level value
lsr x3, x3, #23 // Left align cache level value - the level is shifted by 1 to the
// right to ease the access to CSSELR and the Set/Way operation.
cbz x3, L_Finished // No need to clean if LoC is 0
mov x10, #0 // Start clean at cache level 0
b Loop1
@ -332,7 +333,8 @@ ASM_PFX(AArch64PerformPoUDataCacheOperation):
mov x1, x0 // Save Function call in x1
mrs x6, clidr_el1 // Read EL1 CLIDR
and x3, x6, #0x38000000 // Mask out all but Point of Unification (PoU)
lsr x3, x3, #27 // Left align cache level value
lsr x3, x3, #26 // Left align cache level value - the level is shifted by 1 to the
// right to ease the access to CSSELR and the Set/Way operation.
cbz x3, L_Finished // No need to clean if LoC is 0
mov x10, #0 // Start clean at cache level 0