diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/MpService.c b/UefiCpuPkg/PiSmmCpuDxeSmm/MpService.c
index baf827cf9d..fa666bd118 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/MpService.c
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/MpService.c
@@ -351,6 +351,8 @@ SmmWaitForApArrival (
   UINT32  DelayedCount;
   UINT32  BlockedCount;
 
+  PERF_FUNCTION_BEGIN ();
+
   DelayedCount = 0;
   BlockedCount = 0;
 
@@ -439,7 +441,7 @@
     DEBUG ((DEBUG_INFO, "SmmWaitForApArrival: Delayed AP Count = %d, Blocked AP Count = %d\n", DelayedCount, BlockedCount));
   }
 
-  return;
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -577,6 +579,8 @@ BSPHandler (
   ASSERT (CpuIndex == mSmmMpSyncData->BspIndex);
   ApCount = 0;
 
+  PERF_FUNCTION_BEGIN ();
+
   //
   // Flag BSP's presence
   //
@@ -792,6 +796,8 @@
   *mSmmMpSyncData->Counter = 0;
   *mSmmMpSyncData->AllCpusInSync = FALSE;
   mSmmMpSyncData->AllApArrivedWithException = FALSE;
+
+  PERF_FUNCTION_END ();
 }
 
 /**
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.c b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.c
index 2144d6ade8..32ca417f73 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.c
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.c
@@ -410,12 +410,15 @@ ExecuteFirstSmiInit (
 {
   UINTN  Index;
 
+  PERF_FUNCTION_BEGIN ();
+
   if (mSmmInitialized == NULL) {
     mSmmInitialized = (BOOLEAN *)AllocatePool (sizeof (BOOLEAN) * mMaxNumberOfCpus);
   }
 
   ASSERT (mSmmInitialized != NULL);
   if (mSmmInitialized == NULL) {
+    PERF_FUNCTION_END ();
     return;
   }
 
@@ -442,6 +445,8 @@
     while (!(BOOLEAN)mSmmInitialized[Index]) {
     }
   }
+
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -463,6 +468,8 @@ SmmRelocateBases (
   UINTN  Index;
   UINTN  BspIndex;
 
+  PERF_FUNCTION_BEGIN ();
+
   //
   // Make sure the reserved size is large enough for procedure SmmInitTemplate.
   //
@@ -540,6 +547,7 @@
   //
   CopyMem (CpuStatePtr, &BakBuf2, sizeof (BakBuf2));
   CopyMem (U8Ptr, BakBuf, sizeof (BakBuf));
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -617,6 +625,8 @@ PiCpuSmmEntry (
   GuidHob = NULL;
   SmmBaseHobData = NULL;
 
+  PERF_FUNCTION_BEGIN ();
+
   //
   // Initialize address fixup
   //
@@ -1194,6 +1204,7 @@
 
   DEBUG ((DEBUG_INFO, "SMM CPU Module exit from SMRAM with EFI_SUCCESS\n"));
 
+  PERF_FUNCTION_END ();
   return EFI_SUCCESS;
 }
 
@@ -1348,12 +1359,15 @@ ConfigSmmCodeAccessCheck (
   UINTN       Index;
   EFI_STATUS  Status;
 
+  PERF_FUNCTION_BEGIN ();
+
   //
   // Check to see if the Feature Control MSR is supported on this CPU
   //
   Index = gSmmCpuPrivate->SmmCoreEntryContext.CurrentlyExecutingCpu;
   if (!SmmCpuFeaturesIsSmmRegisterSupported (Index, SmmRegFeatureControl)) {
     mSmmCodeAccessCheckEnable = FALSE;
+    PERF_FUNCTION_END ();
     return;
   }
 
@@ -1363,6 +1377,7 @@
   //
   if ((AsmReadMsr64 (EFI_MSR_SMM_MCA_CAP) & SMM_CODE_ACCESS_CHK_BIT) == 0) {
     mSmmCodeAccessCheckEnable = FALSE;
+    PERF_FUNCTION_END ();
     return;
   }
 
@@ -1419,6 +1434,8 @@
       ReleaseSpinLock (mConfigSmmCodeAccessCheckLock);
     }
   }
+
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -1540,6 +1557,8 @@
   )
 {
   if (mSmmReadyToLock) {
+    PERF_FUNCTION_BEGIN ();
+
     //
     // Check if all Aps enter SMM. In Relaxed-AP Sync Mode, BSP will not wait for
     // all Aps arrive. However,PerformRemainingTasks() needs to wait all Aps arrive before calling
@@ -1587,12 +1606,20 @@
     //
     ConfigSmmCodeAccessCheck ();
 
+    //
+    // Measure performance of SmmCpuFeaturesCompleteSmmReadyToLock() from caller side
+    // as the implementation is provided by platform.
+    //
+    PERF_START (NULL, "SmmCompleteReadyToLock", NULL, 0);
     SmmCpuFeaturesCompleteSmmReadyToLock ();
+    PERF_END (NULL, "SmmCompleteReadyToLock", NULL, 0);
 
     //
     // Clean SMM ready to lock flag
     //
     mSmmReadyToLock = FALSE;
+
+    PERF_FUNCTION_END ();
   }
 }
 
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.h b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.h
index a5c2bdd971..b03f2ef882 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.h
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.h
@@ -50,6 +50,7 @@ SPDX-License-Identifier: BSD-2-Clause-Patent
 #include
 #include
 #include
+#include <Library/PerformanceLib.h>
 
 #include
 #include
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.inf b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.inf
index 158e05e264..af66a1941c 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.inf
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuDxeSmm.inf
@@ -97,6 +97,7 @@
   ReportStatusCodeLib
   SmmCpuFeaturesLib
   PeCoffGetEntryPointLib
+  PerformanceLib
 
 [Protocols]
   gEfiSmmAccess2ProtocolGuid ## CONSUMES
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmCpuMemoryManagement.c b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmCpuMemoryManagement.c
index 834a756061..8b21e16f1c 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmCpuMemoryManagement.c
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmCpuMemoryManagement.c
@@ -1,6 +1,6 @@
 /** @file
 
-Copyright (c) 2016 - 2019, Intel Corporation. All rights reserved.<BR>
+Copyright (c) 2016 - 2023, Intel Corporation. All rights reserved.<BR>
 SPDX-License-Identifier: BSD-2-Clause-Patent
 
 **/
@@ -1100,6 +1100,8 @@ SetMemMapAttributes (
     return;
   }
 
+  PERF_FUNCTION_BEGIN ();
+
   DEBUG ((DEBUG_INFO, "MemoryAttributesTable:\n"));
   DEBUG ((DEBUG_INFO, " Version - 0x%08x\n", MemoryAttributesTable->Version));
   DEBUG ((DEBUG_INFO, " NumberOfEntries - 0x%08x\n", MemoryAttributesTable->NumberOfEntries));
@@ -1152,7 +1154,7 @@
   PatchSmmSaveStateMap ();
   PatchGdtIdtMap ();
 
-  return;
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -1454,6 +1456,8 @@ SetUefiMemMapAttributes (
   UINTN                  Index;
   EFI_MEMORY_DESCRIPTOR  *Entry;
 
+  PERF_FUNCTION_BEGIN ();
+
   DEBUG ((DEBUG_INFO, "SetUefiMemMapAttributes\n"));
 
   if (mUefiMemoryMap != NULL) {
@@ -1537,6 +1541,8 @@
   //
   // Do not free mUefiMemoryAttributesTable, it will be checked in IsSmmCommBufferForbiddenAddress().
   //
+
+  PERF_FUNCTION_END ();
 }
 
 /**
@@ -1862,6 +1868,7 @@
     return;
   }
 
+  PERF_FUNCTION_BEGIN ();
   DEBUG ((DEBUG_INFO, "SetPageTableAttributes\n"));
 
   //
@@ -1900,5 +1907,5 @@
     EnableCet ();
   }
 
-  return;
+  PERF_FUNCTION_END ();
 }
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c
index 1b0b6673e1..ed6e58065f 100644
--- a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c
+++ b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c
@@ -575,6 +575,8 @@ InitPaging (
   IA32_CR4  Cr4;
   BOOLEAN   Enable5LevelPaging;
 
+  PERF_FUNCTION_BEGIN ();
+
   Cr4.UintN = AsmReadCr4 ();
   Enable5LevelPaging = (BOOLEAN)(Cr4.Bits.LA57 == 1);
@@ -810,7 +812,7 @@
   //
   mXdEnabled = TRUE;
 
-  return;
+  PERF_FUNCTION_END ();
 }
 
 /**