diff --git a/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c b/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c
index 9115ff9e3e..2c00d72dde 100644
--- a/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c
+++ b/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c
@@ -12,6 +12,7 @@
 #include <Library/UefiLib.h>
 #include <Library/UefiBootServicesTableLib.h>
 #include <Library/DebugAgentLib.h>
+#include <Library/VmgExitLib.h>
 #include <Library/DxeServicesTableLib.h>
 
 #include <Protocol/Timer.h>
@@ -85,6 +86,13 @@ GetWakeupBuffer (
 {
   EFI_STATUS            Status;
   EFI_PHYSICAL_ADDRESS  StartAddress;
+  EFI_MEMORY_TYPE       MemoryType;
+
+  if (PcdGetBool (PcdSevEsIsEnabled)) {
+    MemoryType = EfiReservedMemoryType;
+  } else {
+    MemoryType = EfiBootServicesData;
+  }
 
   //
   // Try to allocate buffer below 1M for waking vector.
@@ -97,7 +105,7 @@ GetWakeupBuffer (
     StartAddress = 0x88000;
     Status = gBS->AllocatePages (
                     AllocateMaxAddress,
-                    EfiBootServicesData,
+                    MemoryType,
                     EFI_SIZE_TO_PAGES (WakeupBufferSize),
                     &StartAddress
                     );
@@ -159,8 +167,10 @@ GetSevEsAPMemory (
   VOID
   )
 {
-  EFI_STATUS            Status;
-  EFI_PHYSICAL_ADDRESS  StartAddress;
+  EFI_STATUS                Status;
+  EFI_PHYSICAL_ADDRESS      StartAddress;
+  MSR_SEV_ES_GHCB_REGISTER  Msr;
+  GHCB                      *Ghcb;
 
   //
   // Allocate 1 page for AP jump table page
@@ -176,6 +186,16 @@ GetSevEsAPMemory (
 
   DEBUG ((DEBUG_INFO, "Dxe: SevEsAPMemory = %lx\n", (UINTN) StartAddress));
 
+  //
+  // Save the SevEsAPMemory as the AP jump table.
+  //
+  Msr.GhcbPhysicalAddress = AsmReadMsr64 (MSR_SEV_ES_GHCB);
+  Ghcb = Msr.Ghcb;
+
+  VmgInit (Ghcb);
+  VmgExit (Ghcb, SVM_EXIT_AP_JUMP_TABLE, 0, (UINT64) (UINTN) StartAddress);
+  VmgDone (Ghcb);
+
   return (UINTN) StartAddress;
 }
 
@@ -330,17 +350,26 @@ RelocateApLoop (
   BOOLEAN               MwaitSupport;
   ASM_RELOCATE_AP_LOOP  AsmRelocateApLoopFunc;
   UINTN                 ProcessorNumber;
+  UINTN                 StackStart;
 
   MpInitLibWhoAmI (&ProcessorNumber);
   CpuMpData    = GetCpuMpData ();
   MwaitSupport = IsMwaitSupport ();
+  if (CpuMpData->SevEsIsEnabled) {
+    StackStart = CpuMpData->SevEsAPResetStackStart;
+  } else {
+    StackStart = mReservedTopOfApStack;
+  }
   AsmRelocateApLoopFunc = (ASM_RELOCATE_AP_LOOP) (UINTN) mReservedApLoopFunc;
   AsmRelocateApLoopFunc (
     MwaitSupport,
     CpuMpData->ApTargetCState,
     CpuMpData->PmCodeSegment,
-    mReservedTopOfApStack - ProcessorNumber * AP_SAFE_STACK_SIZE,
-    (UINTN) &mNumberToFinish
+    StackStart - ProcessorNumber * AP_SAFE_STACK_SIZE,
+    (UINTN) &mNumberToFinish,
+    CpuMpData->Pm16CodeSegment,
+    CpuMpData->SevEsAPBuffer,
+    CpuMpData->WakeupBuffer
     );
   //
   // It should never reach here
@@ -374,6 +403,21 @@ MpInitChangeApLoopCallback (
   while (mNumberToFinish > 0) {
     CpuPause ();
   }
+
+  if (CpuMpData->SevEsIsEnabled && (CpuMpData->WakeupBuffer != (UINTN) -1)) {
+    //
+    // There are APs present. Re-use reserved memory area below 1MB from
+    // WakeupBuffer as the area to be used for transitioning to 16-bit mode
+    // in support of booting of the AP by an OS.
+    //
+    CopyMem (
+      (VOID *) CpuMpData->WakeupBuffer,
+      (VOID *) (CpuMpData->AddressMap.RendezvousFunnelAddress +
+                    CpuMpData->AddressMap.SwitchToRealPM16ModeOffset),
+      CpuMpData->AddressMap.SwitchToRealPM16ModeSize
+      );
+  }
+
   DEBUG ((DEBUG_INFO, "%a() done!\n", __FUNCTION__));
 }
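The DxeMpLib.c changes above do two SEV-ES specific things: the below-1MB wakeup buffer is taken from EfiReservedMemoryType so the OS will not reclaim the page that parked APs still depend on, and the page returned by GetSevEsAPMemory () is announced to the hypervisor as the AP jump table through the SVM_EXIT_AP_JUMP_TABLE VMGEXIT. The C sketch below condenses that registration step; it uses only the VmgExitLib calls and MSR/GHCB definitions the hunk itself relies on, and the helper name RegisterSevEsApJumpTable is illustrative, not part of the patch.

#include <Uefi.h>
#include <Library/BaseLib.h>
#include <Library/VmgExitLib.h>
#include <Register/Amd/Fam17Msr.h>
#include <Register/Amd/Ghcb.h>

STATIC
VOID
RegisterSevEsApJumpTable (
  IN EFI_PHYSICAL_ADDRESS  JumpTableAddress
  )
{
  MSR_SEV_ES_GHCB_REGISTER  Msr;
  GHCB                      *Ghcb;

  //
  // The GHCB MSR holds the guest physical address of this vCPU's GHCB page,
  // which is mapped unencrypted so the hypervisor can read the request.
  //
  Msr.GhcbPhysicalAddress = AsmReadMsr64 (MSR_SEV_ES_GHCB);
  Ghcb                    = Msr.Ghcb;

  //
  // ExitInfo1 = 0 selects the "set AP jump table address" operation;
  // ExitInfo2 carries the address itself.
  //
  VmgInit (Ghcb);
  VmgExit (Ghcb, SVM_EXIT_AP_JUMP_TABLE, 0, (UINT64) JumpTableAddress);
  VmgDone (Ghcb);
}

A reserved-memory wakeup buffer matters here because MpInitChangeApLoopCallback () later copies the 16-bit transition code into that buffer; if it stayed EfiBootServicesData, the OS could overwrite it while APs are still parked on it.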
diff --git a/UefiCpuPkg/Library/MpInitLib/Ia32/MpFuncs.nasm b/UefiCpuPkg/Library/MpInitLib/Ia32/MpFuncs.nasm
index 309d53bf3b..7e81d24aa6 100644
--- a/UefiCpuPkg/Library/MpInitLib/Ia32/MpFuncs.nasm
+++ b/UefiCpuPkg/Library/MpInitLib/Ia32/MpFuncs.nasm
@@ -226,7 +226,10 @@ SwitchToRealProcStart:
 SwitchToRealProcEnd:
 
 ;-------------------------------------------------------------------------------------
-;  AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish);
+;  AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);
+;
+;  The last three parameters (Pm16CodeSegment, SevEsAPJumpTable and WakeupBuffer) are
+;  specific to SEV-ES support and are not applicable on IA32.
 ;-------------------------------------------------------------------------------------
 global ASM_PFX(AsmRelocateApLoop)
 ASM_PFX(AsmRelocateApLoop):
diff --git a/UefiCpuPkg/Library/MpInitLib/MpLib.h b/UefiCpuPkg/Library/MpInitLib/MpLib.h
index b1a9d99cb3..02652eaae1 100644
--- a/UefiCpuPkg/Library/MpInitLib/MpLib.h
+++ b/UefiCpuPkg/Library/MpInitLib/MpLib.h
@@ -293,7 +293,8 @@ struct _CPU_MP_DATA {
   UINT64                         GhcbBase;
 };
 
-#define AP_RESET_STACK_SIZE 64
+#define AP_SAFE_STACK_SIZE  128
+#define AP_RESET_STACK_SIZE AP_SAFE_STACK_SIZE
 
 #pragma pack(1)
 
@@ -350,7 +351,10 @@ VOID
   IN UINTN               ApTargetCState,
   IN UINTN               PmCodeSegment,
   IN UINTN               TopOfApStack,
-  IN UINTN               NumberToFinish
+  IN UINTN               NumberToFinish,
+  IN UINTN               Pm16CodeSegment,
+  IN UINTN               SevEsAPJumpTable,
+  IN UINTN               WakeupBuffer
   );
 
 /**
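Two interface changes sit behind the assembly edits: AP_RESET_STACK_SIZE, which sizes each AP's slot in the SEV-ES reset stack area, is raised from 64 bytes to the 128-byte AP_SAFE_STACK_SIZE already used by the relocated AP loop, so both paths share one per-processor stack budget, and the ASM_RELOCATE_AP_LOOP prototype gains the three SEV-ES arguments (ignored by the IA32 implementation). A minimal sketch of the stack carve-out implied by RelocateApLoop () in the DxeMpLib.c hunk; the helper name ApSafeStackTop is illustrative only:

#include <Base.h>

#define AP_SAFE_STACK_SIZE  128

//
// AP n runs the relocated loop with its stack top n * 128 bytes below
// StackStart (mReservedTopOfApStack, or SevEsAPResetStackStart when SEV-ES
// is enabled), so the reserved area must cover CpuCount * AP_SAFE_STACK_SIZE
// bytes for the slices to stay disjoint.
//
UINTN
ApSafeStackTop (
  IN UINTN  StackStart,
  IN UINTN  ProcessorNumber
  )
{
  return StackStart - (ProcessorNumber * AP_SAFE_STACK_SIZE);
}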
diff --git a/UefiCpuPkg/Library/MpInitLib/X64/MpFuncs.nasm b/UefiCpuPkg/Library/MpInitLib/X64/MpFuncs.nasm
index 6956b408d0..5d30f35b20 100644
--- a/UefiCpuPkg/Library/MpInitLib/X64/MpFuncs.nasm
+++ b/UefiCpuPkg/Library/MpInitLib/X64/MpFuncs.nasm
@@ -465,6 +465,10 @@ BITS 16
     ;   - IP for Real Mode (two bytes)
     ;   - CS for Real Mode (two bytes)
     ;
+    ; This label is also used with AsmRelocateApLoop. During MP finalization,
+    ; the code from PM16Mode to SwitchToRealProcEnd is copied to the start of
+    ; the WakeupBuffer, allowing a parked AP to be booted by an OS.
+    ;
 PM16Mode:
     mov        eax, cr0                    ; Read CR0
     btr        eax, 0                      ; Set PE=0
@@ -487,32 +491,96 @@ PM16Mode:
 SwitchToRealProcEnd:
 
 ;-------------------------------------------------------------------------------------
-;  AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish);
+;  AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);
;-------------------------------------------------------------------------------------
 global ASM_PFX(AsmRelocateApLoop)
 ASM_PFX(AsmRelocateApLoop):
 AsmRelocateApLoopStart:
 BITS 64
-    cli                          ; Disable interrupt before switching to 32-bit mode
-    mov        rax, [rsp + 40]   ; CountTofinish
-    lock dec   dword [rax]       ; (*CountTofinish)--
-    mov        rsp, r9
+    cmp        qword [rsp + 56], 0  ; SevEsAPJumpTable
+    je         NoSevEs
+
+    ;
+    ; Perform some SEV-ES related setup before leaving 64-bit mode
+    ;
     push       rcx
     push       rdx
 
-    lea        rsi, [PmEntry]    ; rsi <- The start address of transition code
+    ;
+    ; Get the RDX reset value using CPUID
+    ;
+    mov        rax, 1
+    cpuid
+    mov        rsi, rax          ; Save off the reset value for RDX
+
+    ;
+    ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call
+    ;   - Must be done while in 64-bit long mode so that writes to
+    ;     the GHCB memory will be unencrypted.
+    ;   - No NAE events can be generated once this is set otherwise
+    ;     the AP_RESET_HOLD SW_EXITCODE will be overwritten.
+    ;
+    mov        rcx, 0xc0010130
+    rdmsr                        ; Retrieve current GHCB address
+    shl        rdx, 32
+    or         rdx, rax
+
+    mov        rdi, rdx
+    xor        rax, rax
+    mov        rcx, 0x800
+    shr        rcx, 3
+    rep stosq                    ; Clear the GHCB
+
+    mov        rax, 0x80000004   ; VMGEXIT AP_RESET_HOLD
+    mov        [rdx + 0x390], rax
+
+    pop        rdx
+    pop        rcx
+
+NoSevEs:
+    cli                          ; Disable interrupt before switching to 32-bit mode
+    mov        rax, [rsp + 40]   ; CountTofinish
+    lock dec   dword [rax]       ; (*CountTofinish)--
+
+    mov        r10, [rsp + 48]   ; Pm16CodeSegment
+    mov        rax, [rsp + 56]   ; SevEsAPJumpTable
+    mov        rbx, [rsp + 64]   ; WakeupBuffer
+    mov        rsp, r9           ; TopOfApStack
+
+    push       rax               ; Save SevEsAPJumpTable
+    push       rbx               ; Save WakeupBuffer
+    push       r10               ; Save Pm16CodeSegment
+    push       rcx               ; Save MwaitSupport
+    push       rdx               ; Save ApTargetCState
+
+    lea        rax, [PmEntry]    ; rax <- The start address of transition code
 
     push       r8
-    push       rsi
-    DB         0x48
-    retf
+    push       rax
+
+    ;
+    ; Clear R8 - R15, for reset, before going into 32-bit mode
+    ;
+    xor        r8, r8
+    xor        r9, r9
+    xor        r10, r10
+    xor        r11, r11
+    xor        r12, r12
+    xor        r13, r13
+    xor        r14, r14
+    xor        r15, r15
+
+    ;
+    ; Far return into 32-bit mode
+    ;
+o64 retf
+
 BITS 32
 PmEntry:
     mov        eax, cr0
     btr        eax, 31             ; Clear CR0.PG
     mov        cr0, eax            ; Disable paging and caches
 
-    mov        ebx, edx            ; Save EntryPoint to rbx, for rdmsr will overwrite rdx
     mov        ecx, 0xc0000080
     rdmsr
     and        ah, ~ 1             ; Clear LME
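Before it drops out of long mode, an SEV-ES AP now primes its GHCB for the parking VMGEXIT: it clears the GHCB and stores the AP_RESET_HOLD exit code (0x80000004) in the SW_EXITCODE field at offset 0x390, because those writes must happen while the GHCB is still accessible unencrypted and no later NAE event may overwrite the field. The following C sketch restates that prologue under the same assumptions the assembly makes (identity-mapped GHCB, 0x800 bytes cleared); the macro names other than MSR_SEV_ES_GHCB are illustrative:

#include <Base.h>
#include <Library/BaseLib.h>
#include <Library/BaseMemoryLib.h>
#include <Register/Amd/Fam17Msr.h>

#define GHCB_CLEAR_SIZE          0x800         // bytes cleared by "rep stosq" above
#define GHCB_SW_EXITCODE_OFFSET  0x390         // offset written by the assembly above
#define SVM_EXIT_AP_RESET_HOLD   0x80000004ULL

STATIC
VOID
PrepareGhcbForApResetHold (
  VOID
  )
{
  UINT8  *Ghcb;

  //
  // The GHCB MSR holds this vCPU's GHCB guest physical address; the page is
  // identity mapped here, so it can be written directly.
  //
  Ghcb = (UINT8 *) (UINTN) AsmReadMsr64 (MSR_SEV_ES_GHCB);

  ZeroMem (Ghcb, GHCB_CLEAR_SIZE);
  *(UINT64 *) (Ghcb + GHCB_SW_EXITCODE_OFFSET) = SVM_EXIT_AP_RESET_HOLD;
}

The RDX reset value captured with CPUID in the same prologue is parked in ESI and restored just before the final far return, since the 32-bit transition code clobbers EDX while turning off long mode.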
@@ -525,6 +593,8 @@ PmEntry:
     add        esp, 4
     pop        ecx,
     add        esp, 4
+
+MwaitCheck:
     cmp        cl, 1                ; Check mwait-monitor support
     jnz        HltLoop
     mov        ebx, edx             ; Save C-State to ebx
@@ -538,10 +608,50 @@ MwaitLoop:
     shl        eax, 4
     mwait
     jmp        MwaitLoop
+
 HltLoop:
+    pop        edx                  ; PM16CodeSegment
+    add        esp, 4
+    pop        ebx                  ; WakeupBuffer
+    add        esp, 4
+    pop        eax                  ; SevEsAPJumpTable
+    add        esp, 4
+    cmp        eax, 0               ; Check for SEV-ES
+    je         DoHlt
+
+    cli
+    ;
+    ; SEV-ES is enabled, use VMGEXIT (GHCB information already
+    ; set by caller)
+    ;
+BITS 64
+    rep        vmmcall
+BITS 32
+
+    ;
+    ; Back from VMGEXIT AP_HLT_LOOP
+    ;   Push the FLAGS/CS/IP values to use
+    ;
+    push       word 0x0002          ; EFLAGS
+    xor        ecx, ecx
+    mov        cx, [eax + 2]        ; CS
+    push       cx
+    mov        cx, [eax]            ; IP
+    push       cx
+    push       word 0x0000          ; For alignment, will be discarded
+
+    push       edx
+    push       ebx
+
+    mov        edx, esi             ; Restore RDX reset value
+
+    retf
+
+DoHlt:
     cli
     hlt
-    jmp        HltLoop
+    jmp        DoHlt
+
 BITS 64
 AsmRelocateApLoopEnd:
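The HltLoop rewrite is the runtime counterpart of the jump-table registration: a non-SEV-ES AP still parks in a plain HLT loop (DoHlt), while an SEV-ES AP issues the AP_RESET_HOLD VMGEXIT ("rep vmmcall") and, once the hypervisor releases it, builds a 16-bit return frame from the jump table (IP at offset 0, CS at offset 2) plus a clean FLAGS word, pushes Pm16CodeSegment and WakeupBuffer, and far-returns into the PM16Mode code that MpInitChangeApLoopCallback () copied into the wakeup buffer, which then switches to real mode and hands control to the OS-supplied vector. A C overlay of the jump-table layout assumed by the loads above; the struct name is illustrative and only the two 16-bit fields and their order come from the code:

#include <Base.h>

#pragma pack (1)
typedef struct {
  UINT16    RealModeIp;   // read by "mov cx, [eax]" above
  UINT16    RealModeCs;   // read by "mov cx, [eax + 2]" above
} SEV_ES_AP_JUMP_TABLE;
#pragma pack ()

The pushed FLAGS value 0x0002 leaves interrupts disabled and sets only the always-one reserved bit, and the extra zero word pushed after IP exists purely to keep the 16-bit stack frame aligned, as the comment notes.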