UefiCpuPkg/MtrrLib: refine MtrrLibProgramFixedMtrr()

The patch replaces some if-checks with assertions because
the conditions they guard can never occur.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Ruiyu Ni <ruiyu.ni@intel.com>
Cc: Michael D Kinney <michael.d.kinney@intel.com>
Cc: Eric Dong <eric.dong@intel.com>
Reviewed-by: Jiewen Yao <jiewen.yao@intel.com>
Ruiyu Ni 2017-09-26 23:09:50 +08:00
parent 222c49300d
commit 5fbb5adea8
1 changed file with 31 additions and 35 deletions
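For context on the idiom: in edk2, ASSERT() comes from MdePkg's DebugLib and is typically compiled out of RELEASE builds (MDEPKG_NDEBUG), so turning a dead error check into an assertion removes an untestable branch while still documenting the invariant in DEBUG builds. Below is a minimal sketch of the pattern in plain C, with the standard assert() standing in for DebugLib's ASSERT(); all names are hypothetical, not the edk2 code.

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical lookup mirroring the shape of the patched loop. */
static size_t
FindEntry (const unsigned *Table, size_t TableSize, unsigned Value)
{
  size_t Index;

  for (Index = 0; Index < TableSize; Index++) {
    if (Table[Index] == Value) {
      break;
    }
  }

  /* Before the patch this was "if (Index == TableSize) return error;".
     Every caller guarantees Value is present, so the branch can never
     be taken; assert() keeps the invariant visible in debug builds and
     vanishes under NDEBUG, much as edk2's ASSERT() does in RELEASE. */
  assert (Index != TableSize);

  return Index;
}

int
main (void)
{
  unsigned Primes[] = { 2, 3, 5, 7 };

  printf ("index of 5 = %zu\n", FindEntry (Primes, 4, 5));
  return 0;
}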


UefiCpuPkg/Library/MtrrLib/MtrrLib.c

@@ -466,10 +466,10 @@ MtrrGetVariableMtrr (
   @param[in]      Type          The memory type to set.
   @param[in, out] Base          The base address of memory range.
   @param[in, out] Length        The length of memory range.
-  @param[in, out] LastMsrNum    On input, the last index of the fixed MTRR MSR to program.
+  @param[in, out] LastMsrIndex  On input, the last index of the fixed MTRR MSR to program.
                                 On return, the current index of the fixed MTRR MSR to program.
-  @param[out]     ReturnClearMask  The bits to clear in the fixed MTRR MSR.
-  @param[out]     ReturnOrMask     The bits to set in the fixed MTRR MSR.
+  @param[out]     ClearMask     The bits to clear in the fixed MTRR MSR.
+  @param[out]     OrMask        The bits to set in the fixed MTRR MSR.
 
   @retval RETURN_SUCCESS      The cache type was updated successfully
   @retval RETURN_UNSUPPORTED  The requested range or cache type was invalid
@@ -481,27 +481,25 @@ MtrrLibProgramFixedMtrr (
   IN     MTRR_MEMORY_CACHE_TYPE  Type,
   IN OUT UINT64                  *Base,
   IN OUT UINT64                  *Length,
-  IN OUT UINT32                  *LastMsrNum,
-  OUT    UINT64                  *ReturnClearMask,
-  OUT    UINT64                  *ReturnOrMask
+  IN OUT UINT32                  *LastMsrIndex,
+  OUT    UINT64                  *ClearMask,
+  OUT    UINT64                  *OrMask
   )
 {
-  UINT32  MsrNum;
+  UINT32  MsrIndex;
   UINT32  LeftByteShift;
   UINT32  RightByteShift;
-  UINT64  OrMask;
-  UINT64  ClearMask;
   UINT64  SubLength;
 
   //
   // Find the fixed MTRR index to be programmed
   //
-  for (MsrNum = *LastMsrNum + 1; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {
-    if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&
+  for (MsrIndex = *LastMsrIndex + 1; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
+    if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&
         (*Base <
             (
-             mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
-             (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)
+             mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +
+             (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)
             )
         )
        ) {
@@ -509,65 +507,63 @@ MtrrLibProgramFixedMtrr (
     }
   }
 
-  if (MsrNum == MTRR_NUMBER_OF_FIXED_MTRR) {
-    return RETURN_UNSUPPORTED;
-  }
+  ASSERT (MsrIndex != ARRAY_SIZE (mMtrrLibFixedMtrrTable));
 
   //
   // Find the begin offset in fixed MTRR and calculate byte offset of left shift
   //
-  LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrNum].BaseAddress)
-               / mMtrrLibFixedMtrrTable[MsrNum].Length;
-
-  if (LeftByteShift >= 8) {
+  if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
+    //
+    // Base address should be aligned to the begin of a certain Fixed MTRR range.
+    //
     return RETURN_UNSUPPORTED;
   }
+  LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
+  ASSERT (LeftByteShift < 8);
 
   //
   // Find the end offset in fixed MTRR and calculate byte offset of right shift
   //
-  SubLength = mMtrrLibFixedMtrrTable[MsrNum].Length * (8 - LeftByteShift);
+  SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);
   if (*Length >= SubLength) {
     RightByteShift = 0;
   } else {
-    RightByteShift = 8 - LeftByteShift -
-                (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrNum].Length;
-    if ((LeftByteShift >= 8) ||
-        (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrNum].Length) != 0)
-       ) {
+    if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
+      //
+      // Length should be aligned to the end of a certain Fixed MTRR range.
+      //
       return RETURN_UNSUPPORTED;
     }
+    RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
     //
     // Update SubLength by actual length
     //
     SubLength = *Length;
   }
 
-  ClearMask = CLEAR_SEED;
-  OrMask    = MultU64x32 (OR_SEED, (UINT32) Type);
+  *ClearMask = CLEAR_SEED;
+  *OrMask    = MultU64x32 (OR_SEED, (UINT32) Type);
 
   if (LeftByteShift != 0) {
     //
     // Clear the low bits by LeftByteShift
     //
-    ClearMask &= LShiftU64 (ClearMask, LeftByteShift * 8);
-    OrMask    &= LShiftU64 (OrMask, LeftByteShift * 8);
+    *ClearMask &= LShiftU64 (*ClearMask, LeftByteShift * 8);
+    *OrMask    &= LShiftU64 (*OrMask, LeftByteShift * 8);
   }
 
   if (RightByteShift != 0) {
     //
     // Clear the high bits by RightByteShift
     //
-    ClearMask &= RShiftU64 (ClearMask, RightByteShift * 8);
-    OrMask    &= RShiftU64 (OrMask, RightByteShift * 8);
+    *ClearMask &= RShiftU64 (*ClearMask, RightByteShift * 8);
+    *OrMask    &= RShiftU64 (*OrMask, RightByteShift * 8);
   }
 
   *Length -= SubLength;
   *Base   += SubLength;
 
-  *LastMsrNum      = MsrNum;
-  *ReturnClearMask = ClearMask;
-  *ReturnOrMask    = OrMask;
+  *LastMsrIndex = MsrIndex;
 
   return RETURN_SUCCESS;
 }
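The mask arithmetic the function keeps is easiest to see with concrete numbers: each fixed MTRR MSR packs eight one-byte memory-type fields, so the code replicates a seed into every byte and then trims LeftByteShift low bytes and RightByteShift high bytes. Below is a standalone sketch in plain C; the seed values are assumptions matching the OR_SEED/CLEAR_SEED definitions in MtrrLib.c, and native shifts stand in for LShiftU64/RShiftU64.

#include <stdio.h>
#include <stdint.h>

/* Assumed to match the MtrrLib.c definitions. */
#define OR_SEED     0x0101010101010101ULL  /* one type byte per sub-range */
#define CLEAR_SEED  0xFFFFFFFFFFFFFFFFULL

int
main (void)
{
  uint32_t Type           = 6;  /* write-back */
  uint32_t LeftByteShift  = 2;  /* skip the 2 lowest sub-ranges  */
  uint32_t RightByteShift = 3;  /* skip the 3 highest sub-ranges */

  /* Replicate the type into all 8 byte fields of the MSR image. */
  uint64_t ClearMask = CLEAR_SEED;
  uint64_t OrMask    = OR_SEED * Type;

  /* Zero the byte fields below the requested range ... */
  ClearMask &= ClearMask << (LeftByteShift * 8);
  OrMask    &= OrMask    << (LeftByteShift * 8);
  /* ... and the byte fields above it. */
  ClearMask &= ClearMask >> (RightByteShift * 8);
  OrMask    &= OrMask    >> (RightByteShift * 8);

  /* Prints ClearMask = 0x000000FFFFFF0000 and OrMask = 0x0000000606060000:
     only byte fields 2..4 (the three covered sub-ranges) are set, so an
     update of the form (Msr & ~ClearMask) | OrMask leaves the memory
     types of the other five sub-ranges untouched. */
  printf ("ClearMask = 0x%016llX\n", (unsigned long long) ClearMask);
  printf ("OrMask    = 0x%016llX\n", (unsigned long long) OrMask);
  return 0;
}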