diff --git a/UefiCpuPkg/Library/SmmRelocationLib/Ia32/SmmInit.nasm b/UefiCpuPkg/Library/SmmRelocationLib/Ia32/SmmInit.nasm
index 3d845e9e16..8916cb7d06 100644
--- a/UefiCpuPkg/Library/SmmRelocationLib/Ia32/SmmInit.nasm
+++ b/UefiCpuPkg/Library/SmmRelocationLib/Ia32/SmmInit.nasm
@@ -18,11 +18,11 @@
 extern ASM_PFX(SmmInitHandler)
 extern ASM_PFX(mRebasedFlag)
 extern ASM_PFX(mSmmRelocationOriginalAddress)
 
-global ASM_PFX(gPatchSmmCr3)
-global ASM_PFX(gPatchSmmCr4)
-global ASM_PFX(gPatchSmmCr0)
+global ASM_PFX(gPatchSmmInitCr3)
+global ASM_PFX(gPatchSmmInitCr4)
+global ASM_PFX(gPatchSmmInitCr0)
 global ASM_PFX(gPatchSmmInitStack)
-global ASM_PFX(gcSmiInitGdtr)
+global ASM_PFX(gcSmmInitGdtr)
 global ASM_PFX(gcSmmInitSize)
 global ASM_PFX(gcSmmInitTemplate)
@@ -83,7 +83,7 @@ CodeSeg64:
             DB      0                   ; BaseHigh
 GDT_SIZE equ $ - NullSeg
 
-ASM_PFX(gcSmiInitGdtr):
+ASM_PFX(gcSmmInitGdtr):
     DW      GDT_SIZE - 1
     DD      NullSeg
 
@@ -100,18 +100,18 @@ ASM_PFX(SmmStartup):
     and     ebx, BIT20                  ; extract NX capability bit
     shr     ebx, 9                      ; shift bit to IA32_EFER.NXE[BIT11] position
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr3):
+ASM_PFX(gPatchSmmInitCr3):
     mov     cr3, eax
-o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
+o32 lgdt    [cs:ebp + (ASM_PFX(gcSmmInitGdtr) - ASM_PFX(SmmStartup))]
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr4):
+ASM_PFX(gPatchSmmInitCr4):
     mov     cr4, eax
     mov     ecx, 0xc0000080             ; IA32_EFER MSR
     rdmsr
     or      eax, ebx                    ; set NXE bit if NX is available
     wrmsr
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr0):
+ASM_PFX(gPatchSmmInitCr0):
     mov     di, PROTECT_MODE_DS
     mov     cr0, eax
     jmp     PROTECT_MODE_CS : dword @32bit
diff --git a/UefiCpuPkg/Library/SmmRelocationLib/InternalSmmRelocationLib.h b/UefiCpuPkg/Library/SmmRelocationLib/InternalSmmRelocationLib.h
index a9d3f271a9..ede61b956f 100644
--- a/UefiCpuPkg/Library/SmmRelocationLib/InternalSmmRelocationLib.h
+++ b/UefiCpuPkg/Library/SmmRelocationLib/InternalSmmRelocationLib.h
@@ -32,13 +32,13 @@
 #include
 #include
 
-extern IA32_DESCRIPTOR  gcSmiInitGdtr;
+extern IA32_DESCRIPTOR  gcSmmInitGdtr;
 extern CONST UINT16     gcSmmInitSize;
 extern CONST UINT8      gcSmmInitTemplate[];
 
-X86_ASSEMBLY_PATCH_LABEL  gPatchSmmCr0;
-X86_ASSEMBLY_PATCH_LABEL  gPatchSmmCr3;
-X86_ASSEMBLY_PATCH_LABEL  gPatchSmmCr4;
+X86_ASSEMBLY_PATCH_LABEL  gPatchSmmInitCr0;
+X86_ASSEMBLY_PATCH_LABEL  gPatchSmmInitCr3;
+X86_ASSEMBLY_PATCH_LABEL  gPatchSmmInitCr4;
 X86_ASSEMBLY_PATCH_LABEL  gPatchSmmInitStack;
 
 //
diff --git a/UefiCpuPkg/Library/SmmRelocationLib/SmmRelocationLib.c b/UefiCpuPkg/Library/SmmRelocationLib/SmmRelocationLib.c
index 13e62b662d..ca98f06a05 100644
--- a/UefiCpuPkg/Library/SmmRelocationLib/SmmRelocationLib.c
+++ b/UefiCpuPkg/Library/SmmRelocationLib/SmmRelocationLib.c
@@ -24,7 +24,7 @@ EFI_PROCESSOR_INFORMATION  *mProcessorInfo = NULL;
 //
 // IDT used during SMM Init
 //
-IA32_DESCRIPTOR  gcSmiIdtr;
+IA32_DESCRIPTOR  gcSmmInitIdtr;
 
 //
 // Smbase for all CPUs
@@ -120,7 +120,7 @@ SmmInitHandler (
   //
   // Update SMM IDT entries' code segment and load IDT
   //
-  AsmWriteIdtr (&gcSmiIdtr);
+  AsmWriteIdtr (&gcSmmInitIdtr);
   ApicId = GetApicId ();
 
   for (Index = 0; Index < mNumberOfCpus; Index++) {
@@ -169,9 +169,9 @@ SmmRelocateBases (
   //
   // Patch ASM code template with current CR0, CR3, and CR4 values
   //
-  PatchInstructionX86 (gPatchSmmCr0, AsmReadCr0 (), 4);
-  PatchInstructionX86 (gPatchSmmCr3, AsmReadCr3 (), 4);
-  PatchInstructionX86 (gPatchSmmCr4, AsmReadCr4 () & (~CR4_CET_ENABLE), 4);
+  PatchInstructionX86 (gPatchSmmInitCr0, AsmReadCr0 (), 4);
+  PatchInstructionX86 (gPatchSmmInitCr3, AsmReadCr3 (), 4);
+  PatchInstructionX86 (gPatchSmmInitCr4, AsmReadCr4 () & (~CR4_CET_ENABLE), 4);
 
   U8Ptr       = (UINT8 *)(UINTN)(SMM_DEFAULT_SMBASE + SMM_HANDLER_OFFSET);
   CpuStatePtr = (SMRAM_SAVE_STATE_MAP *)(UINTN)(SMM_DEFAULT_SMBASE + SMRAM_SAVE_STATE_MAP_OFFSET);
@@ -251,15 +251,15 @@ InitSmmIdt (
   // There are 32 (not 255) entries in it since only processor
   // generated exceptions will be handled.
   //
-  gcSmiIdtr.Limit = (sizeof (IA32_IDT_GATE_DESCRIPTOR) * 32) - 1;
+  gcSmmInitIdtr.Limit = (sizeof (IA32_IDT_GATE_DESCRIPTOR) * 32) - 1;
 
   //
   // Allocate for IDT.
   // sizeof (UINTN) is for the PEI Services Table pointer.
   //
-  gcSmiIdtr.Base = (UINTN)AllocateZeroPool (gcSmiIdtr.Limit + 1 + sizeof (UINTN));
-  ASSERT (gcSmiIdtr.Base != 0);
-  gcSmiIdtr.Base += sizeof (UINTN);
+  gcSmmInitIdtr.Base = (UINTN)AllocateZeroPool (gcSmmInitIdtr.Limit + 1 + sizeof (UINTN));
+  ASSERT (gcSmmInitIdtr.Base != 0);
+  gcSmmInitIdtr.Base += sizeof (UINTN);
 
   //
   // Disable Interrupt, save InterruptState and save PEI IDT table
@@ -272,17 +272,17 @@ InitSmmIdt (
   // The PEI Services Table pointer will be stored in the sizeof (UINTN) bytes
   // immediately preceding the IDT in memory.
   //
-  PeiServices                                   = (CONST EFI_PEI_SERVICES **)(*(UINTN *)(PeiIdtr.Base - sizeof (UINTN)));
-  (*(UINTN *)(gcSmiIdtr.Base - sizeof (UINTN))) = (UINTN)PeiServices;
+  PeiServices                                       = (CONST EFI_PEI_SERVICES **)(*(UINTN *)(PeiIdtr.Base - sizeof (UINTN)));
+  (*(UINTN *)(gcSmmInitIdtr.Base - sizeof (UINTN))) = (UINTN)PeiServices;
 
   //
   // Load SMM temporary IDT table
   //
-  AsmWriteIdtr (&gcSmiIdtr);
+  AsmWriteIdtr (&gcSmmInitIdtr);
 
   //
   // Setup SMM default exception handlers, SMM IDT table
-  // will be updated and saved in gcSmiIdtr
+  // will be updated and saved in gcSmmInitIdtr
   //
   Status = InitializeCpuExceptionHandlers (NULL);
   ASSERT_EFI_ERROR (Status);
diff --git a/UefiCpuPkg/Library/SmmRelocationLib/X64/SmmInit.nasm b/UefiCpuPkg/Library/SmmRelocationLib/X64/SmmInit.nasm
index c790ad7ad8..8288b723c4 100644
--- a/UefiCpuPkg/Library/SmmRelocationLib/X64/SmmInit.nasm
+++ b/UefiCpuPkg/Library/SmmRelocationLib/X64/SmmInit.nasm
@@ -18,11 +18,11 @@
 extern ASM_PFX(SmmInitHandler)
 extern ASM_PFX(mRebasedFlag)
 extern ASM_PFX(mSmmRelocationOriginalAddress)
-global ASM_PFX(gPatchSmmCr3)
-global ASM_PFX(gPatchSmmCr4)
-global ASM_PFX(gPatchSmmCr0)
+global ASM_PFX(gPatchSmmInitCr3)
+global ASM_PFX(gPatchSmmInitCr4)
+global ASM_PFX(gPatchSmmInitCr0)
 global ASM_PFX(gPatchSmmInitStack)
-global ASM_PFX(gcSmiInitGdtr)
+global ASM_PFX(gcSmmInitGdtr)
 global ASM_PFX(gcSmmInitSize)
 global ASM_PFX(gcSmmInitTemplate)
 global ASM_PFX(gPatchRebasedFlagAddr32)
@@ -84,7 +84,7 @@ CodeSeg64:
             DB      0                   ; BaseHigh
 GDT_SIZE equ $ - NullSeg
 
-ASM_PFX(gcSmiInitGdtr):
+ASM_PFX(gcSmmInitGdtr):
     DW      GDT_SIZE - 1
     DQ      NullSeg
 
@@ -100,11 +100,11 @@ ASM_PFX(SmmStartup):
     cpuid
    mov     ebx, edx                    ; rdmsr will change edx. keep it in ebx.
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr3):
+ASM_PFX(gPatchSmmInitCr3):
     mov     cr3, eax
-o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
+o32 lgdt    [cs:ebp + (ASM_PFX(gcSmmInitGdtr) - ASM_PFX(SmmStartup))]
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr4):
+ASM_PFX(gPatchSmmInitCr4):
     or      ah, 2                       ; enable XMM registers access
     mov     cr4, eax
     mov     ecx, 0xc0000080             ; IA32_EFER MSR
@@ -116,7 +116,7 @@ ASM_PFX(gPatchSmmCr4):
 .1:
     wrmsr
     mov     eax, strict dword 0         ; source operand will be patched
-ASM_PFX(gPatchSmmCr0):
+ASM_PFX(gPatchSmmInitCr0):
     mov     cr0, eax                    ; enable protected mode & paging
     jmp     LONG_MODE_CS : dword 0      ; offset will be patched to @LongMode
 @PatchLongModeOffset: