UefiCpuPkg/SmmCpuFeaturesLib: Remove .S files for IA32 and X64 arch

.nasm files have been added for the IA32 and X64 arch. The .S assembly
files are no longer required.
https://bugzilla.tianocore.org/show_bug.cgi?id=1594

Cc: Michael D Kinney <michael.d.kinney@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Shenglei Zhang <shenglei.zhang@intel.com>
Reviewed-by: Eric Dong <eric.dong@intel.com>
Reviewed-by: Liming Gao <liming.gao@intel.com>
This commit is contained in:
Shenglei Zhang 2019-03-06 15:21:08 +08:00 committed by Liming Gao
parent a8ac75b8ee
commit 475a4317c0
5 changed files with 0 additions and 918 deletions

View File

@ -1,278 +0,0 @@
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
# SmiEntry.S
#
# Abstract:
#
# Code template of the SMI handler for a particular processor
#
#------------------------------------------------------------------------------
# IA32 STM-aware SMI entry template (GAS / AT&T syntax).
#
# NOTE(review): this block is a template, not executed in place.  C code is
# expected to copy it to each CPU's SMBASE + 0x8000 and patch the gStm*
# labelled slots (gStmSmbase, gStmSmiStack, gStmSmiCr3, gStmXdSupported)
# before the first SMI, so those labels mark patch points embedded in the
# instruction stream, not ordinary variables — confirm against the .nasm
# replacement and the C installer.
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerTemplate)
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerSize)
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerOffset)
ASM_GLOBAL ASM_PFX(gStmSmiCr3)
ASM_GLOBAL ASM_PFX(gStmSmiStack)
ASM_GLOBAL ASM_PFX(gStmSmbase)
ASM_GLOBAL ASM_PFX(gStmXdSupported)
ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
ASM_GLOBAL ASM_PFX(gStmSmiHandlerIdtr)
# MSR numbers and the EFER.NXE bit used for the XD (execute-disable) dance.
.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800
#
# Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR
#
.equ DSC_OFFSET, 0xfb00
.equ DSC_GDTPTR, 0x48
.equ DSC_GDTSIZ, 0x50
.equ DSC_CS, 0x14
.equ DSC_DS, 0x16
.equ DSC_SS, 0x18
.equ DSC_OTHERSEG, 0x1A
.equ PROTECT_MODE_CS, 0x08
.equ PROTECT_MODE_DS, 0x20
.equ TSS_SEGMENT, 0x40
.text
ASM_PFX(gcStmSmiHandlerTemplate):
_StmSmiEntryPoint:
# The CPU enters SMM in 16-bit real-address mode, so everything up to the
# far jump below is 16-bit code.  The hand-emitted .byte/.word sequences are
# instructions GAS cannot express here; note also that the (%edi) memory
# operands in this region encode as [bx] in 16-bit mode, so BX (loaded
# first) is the real base register.
.byte 0xbb # mov bx, imm16
.word _StmGdtDesc - _StmSmiEntryPoint + 0x8000
# Fix up the GDT descriptor at _StmGdtDesc with the limit and base stored in
# the TXT processor SMM descriptor at SMBASE + DSC_OFFSET.
.byte 0x2e,0xa1 # mov ax, cs:[offset16]
.word DSC_OFFSET + DSC_GDTSIZ
decl %eax
movl %eax, %cs:(%edi) # mov cs:[bx], ax
.byte 0x66,0x2e,0xa1 # mov eax, cs:[offset16]
.word DSC_OFFSET + DSC_GDTPTR
movw %ax, %cs:2(%edi)
movw %ax, %bp # ebp = GDT base
.byte 0x66
lgdt %cs:(%edi)
# Patch ProtectedMode Segment
.byte 0xb8 # mov ax, imm16
.word PROTECT_MODE_CS # set AX for segment directly
movl %eax, %cs:-2(%edi) # mov cs:[bx - 2], ax
# Patch ProtectedMode entry
.byte 0x66, 0xbf # mov edi, SMBASE
ASM_PFX(gStmSmbase): .space 4
.byte 0x67
lea ((Start32bit - _StmSmiEntryPoint) + 0x8000)(%edi), %ax
movw %ax, %cs:-6(%edi)
# Switch to 32-bit protected mode: mask CR0 (clears CD, NW, WP, AM, TS, EM),
# set PE + MP + NE (0x23), then take the patched far jump emitted below.
movl %cr0, %ebx
.byte 0x66
andl $0x9ffafff3, %ebx
.byte 0x66
orl $0x23, %ebx
movl %ebx, %cr0
.byte 0x66,0xea
.space 4
.space 2
_StmGdtDesc: .space 4
.space 2
Start32bit:
# 32-bit protected mode from here on: load flat data selectors, the per-CPU
# SMI stack (value patched in at gStmSmiStack) and the SMM IDT.
movw $PROTECT_MODE_DS, %ax
movl %eax,%ds
movl %eax,%es
movl %eax,%fs
movl %eax,%gs
movl %eax,%ss
.byte 0xbc # mov esp, imm32
ASM_PFX(gStmSmiStack): .space 4
movl $ASM_PFX(gStmSmiHandlerIdtr), %eax
lidt (%eax)
jmp ProtFlatMode
ProtFlatMode:
# Load the SMM page-table root patched in at gStmSmiCr3.
.byte 0xb8 # mov eax, imm32
ASM_PFX(gStmSmiCr3): .space 4
movl %eax, %cr3
#
# Need to test for CR4 specific bit support
#
movl $1, %eax
cpuid # use CPUID to determine if specific CR4 bits are supported
xorl %eax, %eax # Clear EAX
testl $BIT2, %edx # Check for DE capabilities
jz L8
orl $BIT3, %eax
L8:
testl $BIT6, %edx # Check for PAE capabilities
jz L9
orl $BIT5, %eax
L9:
testl $BIT7, %edx # Check for MCE capabilities
jz L10
orl $BIT6, %eax
L10:
testl $BIT24, %edx # Check for FXSR capabilities
jz L11
orl $BIT9, %eax
L11:
testl $BIT25, %edx # Check for SSE capabilities
jz L12
orl $BIT10, %eax
L12: # as cr4.PGE is not set here, refresh cr3
movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.
# With the stack-guard feature enabled, load the task register so stack
# faults can switch stacks through the TSS.
cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
jz L5
# Load TSS
movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag
movl $TSS_SEGMENT, %eax
ltrw %ax
L5:
# enable NXE if supported
.byte 0xb0 # mov al, imm8
ASM_PFX(gStmXdSupported): .byte 1
cmpb $0, %al
jz SkipXd
#
# Check XD disable bit
#
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
pushl %edx # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L13
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L13:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
jmp XdDone
SkipXd:
# Keep the stack layout identical to the XD path above (which pushed %edx)
# so the popl %edx in CommonHandler always has a matching slot.
subl $4, %esp
XdDone:
movl %cr0, %ebx
orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE
movl %ebx, %cr0
# Load the data/stack selectors published in the TXT descriptor.
leal DSC_OFFSET(%edi),%ebx
movw DSC_DS(%ebx),%ax
movl %eax, %ds
movw DSC_OTHERSEG(%ebx),%ax
movl %eax, %es
movl %eax, %fs
movl %eax, %gs
movw DSC_SS(%ebx),%ax
movl %eax, %ss
CommonHandler:
# Common C dispatch, also reached from _StmSmiHandler after STM re-init.
# ebx = argument taken from 4(%esp) (presumably the CpuIndex — NOTE(review):
# confirm against the .nasm replacement); it is passed in turn to
# CpuSmmDebugEntry, SmiRendezvous and CpuSmmDebugExit (cdecl: arg on stack).
movl 4(%esp), %ebx
pushl %ebx
movl $ASM_PFX(CpuSmmDebugEntry), %eax
call *%eax
addl $4, %esp
pushl %ebx
movl $ASM_PFX(SmiRendezvous), %eax
call *%eax
addl $4, %esp
pushl %ebx
movl $ASM_PFX(CpuSmmDebugExit), %eax
call *%eax
addl $4, %esp
# Restore the XD-disable state saved on entry, then resume from SMM.
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz L16
popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L16
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L16:
rsm
_StmSmiHandler:
# Entry point used when an STM is active: the STM delivers the SMI here
# already in protected mode, so the real-mode setup above did not run and
# the IDT, CR0 and CR4 must be (re)initialized before joining CommonHandler.
#
# Check XD disable bit
#
xorl %esi, %esi
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz StmXdDone
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movl %edx, %esi # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L14
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L14:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone:
push %esi
# below step is needed, because STM does not run above code.
# we have to run below code to set IDT/CR0/CR4
movl $ASM_PFX(gStmSmiHandlerIdtr), %eax
lidt (%eax)
movl %cr0, %eax
orl $0x80010023, %eax # enable paging + WP + NE + MP + PE
movl %eax, %cr0
#
# Need to test for CR4 specific bit support
#
movl $1, %eax
cpuid # use CPUID to determine if specific CR4 bits are supported
movl %cr4, %eax # init EAX
testl $BIT2, %edx # Check for DE capabilities
jz L28
orl $BIT3, %eax
L28:
testl $BIT6, %edx # Check for PAE capabilities
jz L29
orl $BIT5, %eax
L29:
testl $BIT7, %edx # Check for MCE capabilities
jz L30
orl $BIT6, %eax
L30:
testl $BIT24, %edx # Check for FXSR capabilities
jz L31
orl $BIT9, %eax
L31:
testl $BIT25, %edx # Check for SSE capabilities
jz L32
orl $BIT10, %eax
L32: # as cr4.PGE is not set here, refresh cr3
movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.
# STM init finish
jmp CommonHandler
# Size of the whole template and offset of the STM entry point within it;
# consumed by the C code that installs and patches the handler.
ASM_PFX(gcStmSmiHandlerSize) : .word . - _StmSmiEntryPoint
ASM_PFX(gcStmSmiHandlerOffset): .word _StmSmiHandler - _StmSmiEntryPoint

View File

@ -1,174 +0,0 @@
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
# SmiException.S
#
# Abstract:
#
# Exception handlers used in SM mode
#
#------------------------------------------------------------------------------
# IA32 TXT processor SMM descriptor (PSD) template plus the three callbacks
# the STM invokes in the SMM guest: exception, setup and teardown.
ASM_GLOBAL ASM_PFX(gcStmPsd)
ASM_GLOBAL ASM_PFX(SmmStmExceptionHandler)
ASM_GLOBAL ASM_PFX(SmmStmSetup)
ASM_GLOBAL ASM_PFX(SmmStmTeardown)
.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800
.equ CODE_SEL, 0x08
.equ DATA_SEL, 0x20
.equ TSS_SEL, 0x40
.data
# TXT_PROCESSOR_SMM_DESCRIPTOR template.  Field order/size must match the
# structure definition byte-for-byte; dynamic fields (SmmCr3, handler
# RIP/RSP, ExceptionStack, ...) are left zero for C code to fill in.
ASM_PFX(gcStmPsd):
.ascii "TXTPSSIG"
.word PSD_SIZE
.word 1 # Version
.long 0 # LocalApicId
.byte 0x5 # Cr4Pse;Cr4Pae;Intel64Mode;ExecutionDisableOutsideSmrr
.byte 0 # BIOS to STM
.byte 0 # STM to BIOS
.byte 0
.word CODE_SEL
.word DATA_SEL
.word DATA_SEL
.word DATA_SEL
.word TSS_SEL
.word 0
.quad 0 # SmmCr3
# 64-bit function-pointer fields are emitted as a .long address plus a
# zero high half, since this is 32-bit code.
.long ASM_PFX(_OnStmSetup)
.long 0
.long ASM_PFX(_OnStmTeardown)
.long 0
.quad 0 # SmmSmiHandlerRip - SMM guest entrypoint
.quad 0 # SmmSmiHandlerRsp
.quad 0
.long 0
.long 0x80010100 # RequiredStmSmmRevId
.long ASM_PFX(_OnException)
.long 0
.quad 0 # ExceptionStack
.word DATA_SEL
.word 0x1F # ExceptionFilter
.long 0
.quad 0
.quad 0 # BiosHwResourceRequirementsPtr
.quad 0 # AcpiRsdp
.byte 0 # PhysicalAddressBits
.equ PSD_SIZE, . - ASM_PFX(gcStmPsd)
.text
#------------------------------------------------------------------------------
# SMM Exception handlers
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(_OnException)
ASM_PFX(_OnException):
# STM exception callback: pass the current stack pointer (register context)
# to SmmStmExceptionHandler, then hand its result back to the STM via
# VMCALL with %eax = 4 (NOTE(review): service number — confirm against the
# STM specification).  Control should not return here.
movl %esp, %ecx
pushl %ecx
call ASM_PFX(SmmStmExceptionHandler)
addl $4, %esp
movl %eax, %ebx
movl $4, %eax
.byte 0xf, 0x1, 0xc1 # VMCALL
jmp . # VMCALL should not return; spin if it does
ASM_GLOBAL ASM_PFX(_OnStmSetup)
ASM_PFX(_OnStmSetup):
# STM setup callback: temporarily enable NXE (mirroring SmiEntry.S), call
# SmmStmSetup(), restore the saved XD-disable state, then rsm.
#
# Check XD disable bit
#
xorl %esi, %esi
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz StmXdDone1
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movl %edx, %esi # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L13
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L13:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone1:
push %esi # saved MISC_ENABLE high half (0 if XD unsupported)
call ASM_PFX(SmmStmSetup)
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz L14
popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L14
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L14:
rsm
ASM_GLOBAL ASM_PFX(_OnStmTeardown)
ASM_PFX(_OnStmTeardown):
# STM teardown callback: same XD save/enable/restore pattern around a call
# to SmmStmTeardown(), then rsm.
#
# Check XD disable bit
#
xorl %esi, %esi
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz StmXdDone2
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movl %edx, %esi # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L15
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L15:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone2:
push %esi # saved MISC_ENABLE high half (0 if XD unsupported)
call ASM_PFX(SmmStmTeardown)
movl $ASM_PFX(gStmXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz L16
popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L16
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L16:
rsm

View File

@ -35,9 +35,6 @@
Ia32/SmiEntry.nasm
Ia32/SmiException.nasm
Ia32/SmiEntry.S
Ia32/SmiException.S
[Sources.X64]
X64/SmmStmSupport.c
@ -45,9 +42,6 @@
X64/SmiEntry.nasm
X64/SmiException.nasm
X64/SmiEntry.S
X64/SmiException.S
[Packages]
MdePkg/MdePkg.dec
MdeModulePkg/MdeModulePkg.dec

View File

@ -1,282 +0,0 @@
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
# SmiEntry.S
#
# Abstract:
#
# Code template of the SMI handler for a particular processor
#
#------------------------------------------------------------------------------
# X64 STM-aware SMI entry template (GAS / AT&T syntax).
#
# NOTE(review): like the IA32 variant, this is a template copied to each
# CPU's SMBASE + 0x8000 and patched at the gStm* labelled slots before the
# first SMI — those labels are patch points inside the instruction stream.
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerTemplate)
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerSize)
ASM_GLOBAL ASM_PFX(gcStmSmiHandlerOffset)
ASM_GLOBAL ASM_PFX(gStmSmiCr3)
ASM_GLOBAL ASM_PFX(gStmSmiStack)
ASM_GLOBAL ASM_PFX(gStmSmbase)
ASM_GLOBAL ASM_PFX(gStmXdSupported)
ASM_GLOBAL ASM_PFX(gStmSmiHandlerIdtr)
.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800
#
# Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR
#
.equ DSC_OFFSET, 0xfb00
.equ DSC_GDTPTR, 0x48
.equ DSC_GDTSIZ, 0x50
.equ DSC_CS, 0x14
.equ DSC_DS, 0x16
.equ DSC_SS, 0x18
.equ DSC_OTHERSEG, 0x1a
#
# Constants relating to CPU State Save Area
#
.equ SSM_DR6, 0xffd0
.equ SSM_DR7, 0xffc8
.equ PROTECT_MODE_CS, 0x08
.equ PROTECT_MODE_DS, 0x20
.equ LONG_MODE_CS, 0x38
.equ TSS_SEGMENT, 0x40
.equ GDT_SIZE, 0x50
.text
ASM_PFX(gcStmSmiHandlerTemplate):
_StmSmiEntryPoint:
#
# The encoding of BX in 16-bit addressing mode is the same as of RDI in 64-
# bit addressing mode. And that coincidence has been used in the following
# "64-bit like" 16-bit code. Be aware that once RDI is referenced as a
# base address register, it is actually BX that is referenced.
#
.byte 0xbb # mov bx, imm16
.word _StmGdtDesc - _StmSmiEntryPoint + 0x8000
#
# fix GDT descriptor
#
.byte 0x2e,0xa1 # mov ax, cs:[offset16]
.word DSC_OFFSET + DSC_GDTSIZ
.byte 0x48 # dec ax
.byte 0x2e
movl %eax, (%rdi) # mov cs:[bx], ax
.byte 0x66,0x2e,0xa1 # mov eax, cs:[offset16]
.word DSC_OFFSET + DSC_GDTPTR
.byte 0x2e
movw %ax, 2(%rdi)
.byte 0x66,0x2e
lgdt (%rdi)
#
# Patch ProtectedMode Segment
#
.byte 0xb8
.word PROTECT_MODE_CS
.byte 0x2e
movl %eax, -2(%rdi)
#
# Patch ProtectedMode entry
#
.byte 0x66, 0xbf # mov edi, SMBASE
ASM_PFX(gStmSmbase): .space 4
lea ((ProtectedMode - _StmSmiEntryPoint) + 0x8000)(%edi), %ax
.byte 0x2e
movw %ax, -6(%rdi)
#
# Switch into ProtectedMode
#
movq %cr0, %rbx
.byte 0x66
andl $0x9ffafff3, %ebx
.byte 0x66
orl $0x00000023, %ebx
movq %rbx, %cr0
.byte 0x66, 0xea
.space 6
_StmGdtDesc: .space 6
ProtectedMode:
# 32-bit protected mode: load flat data selectors and the per-CPU SMI stack
# (patched in at gStmSmiStack).
movw $PROTECT_MODE_DS, %ax
movl %eax, %ds
movl %eax, %es
movl %eax, %fs
movl %eax, %gs
movl %eax, %ss
.byte 0xbc # mov esp, imm32
ASM_PFX(gStmSmiStack): .space 4
jmp ProtFlatMode
ProtFlatMode:
# Load the SMM CR3 patched in at gStmSmiCr3, then set CR4 to 0x668
# (DE + PAE + MCE + OSFXSR + OSXMMEXCPT) in preparation for long mode.
.byte 0xb8
ASM_PFX(gStmSmiCr3): .space 4
movq %rax, %cr3
movl $0x668,%eax # as cr4.PGE is not set here, refresh cr3
movq %rax, %cr4 # in PreModifyMtrrs() to flush TLB.
# Load TSS
subl $8, %esp # reserve room in stack
sgdt (%rsp)
movl 2(%rsp), %eax # eax = GDT base
addl $8, %esp
movb $0x89, %dl
movb %dl, (TSS_SEGMENT + 5)(%rax) # clear busy flag
movl $TSS_SEGMENT, %eax
ltr %ax
# enable NXE if supported
.byte 0xb0 # mov al, imm8
ASM_PFX(gStmXdSupported): .byte 1
cmpb $0, %al
jz SkipXd
#
# Check XD disable bit
#
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
subl $4, %esp
pushq %rdx # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L13
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L13:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
jmp XdDone
SkipXd:
# Keep the stack layout consistent with the XD path above so the popq %rdx
# in CommonHandler always finds its slot.
subl $8, %esp
XdDone:
#
# Switch to LongMode
#
pushq $LONG_MODE_CS # push cs hardcore here
call Base # push return address for retf later
Base:
addl $(LongMode - Base), (%rsp) # offset for far retf, seg is the 1st arg
movl $MSR_EFER, %ecx
rdmsr
orb $1,%ah # enable LME
wrmsr
movq %cr0, %rbx
orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE
movq %rbx, %cr0
retf
LongMode: # long mode (64-bit code) starts here
movabsq $ASM_PFX(gStmSmiHandlerIdtr), %rax
lidt (%rax)
# Load the data/stack selectors published in the TXT descriptor.
lea (DSC_OFFSET)(%rdi), %ebx
movw DSC_DS(%rbx), %ax
movl %eax,%ds
movw DSC_OTHERSEG(%rbx), %ax
movl %eax,%es
movl %eax,%fs
movl %eax,%gs
movw DSC_SS(%rbx), %ax
movl %eax,%ss
CommonHandler:
# Common C dispatch, also reached from _StmSmiHandler after STM re-init.
# rbx = argument taken from 8(%rsp) (presumably the CpuIndex — NOTE(review):
# confirm against the .nasm replacement).  Calls use the Microsoft x64
# convention (first argument in %rcx), as evidenced below; the addq $-0x20
# reserves the 32-byte shadow/home space required before each call.
movq 8(%rsp), %rbx
# Save FP registers
subq $0x200, %rsp # 512-byte FXSAVE64 area
.byte 0x48 # FXSAVE64
fxsave (%rsp)
addq $-0x20, %rsp
movq %rbx, %rcx
movabsq $ASM_PFX(CpuSmmDebugEntry), %rax
call *%rax
movq %rbx, %rcx
movabsq $ASM_PFX(SmiRendezvous), %rax
call *%rax
movq %rbx, %rcx
movabsq $ASM_PFX(CpuSmmDebugExit), %rax
call *%rax
addq $0x20, %rsp
#
# Restore FP registers
#
.byte 0x48 # FXRSTOR64
fxrstor (%rsp)
addq $0x200, %rsp
# Restore the XD-disable state saved on entry, then resume from SMM.
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz L16
popq %rdx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L16
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L16:
rsm
_StmSmiHandler:
# Entry point used when an STM is active: the STM delivers the SMI here
# already in long mode, so IDT, CR0 and CR4 must be (re)initialized before
# joining CommonHandler.
#
# Check XD disable bit
#
xorq %r8, %r8
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz StmXdDone
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movq %rdx, %r8 # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L14
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L14:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone:
pushq %r8
# below step is needed, because STM does not run above code.
# we have to run below code to set IDT/CR0/CR4
movabsq $ASM_PFX(gStmSmiHandlerIdtr), %rax
lidt (%rax)
movq %cr0, %rax
orl $0x80010023, %eax
movq %rax, %cr0
movq %cr4, %rax
movl $0x668, %eax # as cr4.PGE is not set here, refresh cr3
movq %rax, %cr4 # in PreModifyMtrrs() to flush TLB.
# STM init finish
jmp CommonHandler
# Size of the whole template and offset of the STM entry point within it;
# consumed by the C code that installs and patches the handler.
ASM_PFX(gcStmSmiHandlerSize) : .word . - _StmSmiEntryPoint
ASM_PFX(gcStmSmiHandlerOffset): .word _StmSmiHandler - _StmSmiEntryPoint

View File

@ -1,178 +0,0 @@
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
# SmiException.S
#
# Abstract:
#
# Exception handlers used in SM mode
#
#------------------------------------------------------------------------------
# X64 TXT processor SMM descriptor (PSD) template plus the three callbacks
# the STM invokes in the SMM guest: exception, setup and teardown.
# Calls out to C use the Microsoft x64 convention (first argument in %rcx,
# 32-byte shadow space reserved by the caller), as the subq $0x20/$0x28
# adjustments below show.
ASM_GLOBAL ASM_PFX(gcStmPsd)
ASM_GLOBAL ASM_PFX(SmmStmExceptionHandler)
ASM_GLOBAL ASM_PFX(SmmStmSetup)
ASM_GLOBAL ASM_PFX(SmmStmTeardown)
.equ CODE_SEL, 0x38
.equ DATA_SEL, 0x20
.equ TR_SEL, 0x40
.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0x0c0000080
.equ MSR_EFER_XD, 0x0800
.data
#
# This structure serves as a template for all processors.
#
# TXT_PROCESSOR_SMM_DESCRIPTOR: field order/size must match the structure
# definition byte-for-byte; dynamic fields are left zero for C code to fill.
ASM_PFX(gcStmPsd):
.ascii "TXTPSSIG"
.word PSD_SIZE
.word 1 # Version
.long 0 # LocalApicId
.byte 0xF # Cr4Pse;Cr4Pae;Intel64Mode;ExecutionDisableOutsideSmrr
.byte 0 # BIOS to STM
.byte 0 # STM to BIOS
.byte 0
.word CODE_SEL
.word DATA_SEL
.word DATA_SEL
.word DATA_SEL
.word TR_SEL
.word 0
.quad 0 # SmmCr3
.quad ASM_PFX(_OnStmSetup)
.quad ASM_PFX(_OnStmTeardown)
.quad 0 # SmmSmiHandlerRip - SMM guest entrypoint
.quad 0 # SmmSmiHandlerRsp
.quad 0
.long 0
.long 0x80010100 # RequiredStmSmmRevId
.quad ASM_PFX(_OnException)
.quad 0 # ExceptionStack
.word DATA_SEL
.word 0x1F # ExceptionFilter
.long 0
.quad 0
.quad 0 # BiosHwResourceRequirementsPtr
.quad 0 # AcpiRsdp
.byte 0 # PhysicalAddressBits
.equ PSD_SIZE, . - ASM_PFX(gcStmPsd)
.text
#------------------------------------------------------------------------------
# SMM Exception handlers
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(_OnException)
ASM_PFX(_OnException):
# STM exception callback: pass the current stack pointer (register context)
# in %rcx to SmmStmExceptionHandler, then hand its result back to the STM
# via VMCALL with %eax = 4 (NOTE(review): service number — confirm against
# the STM specification).  Control should not return here.
movq %rsp, %rcx
subq $0x28, %rsp # shadow space + alignment for the call
call ASM_PFX(SmmStmExceptionHandler)
addq $0x28, %rsp
movl %eax, %ebx
movl $4, %eax
.byte 0xf, 0x1, 0xc1 # VMCALL
jmp . # VMCALL should not return; spin if it does
ASM_GLOBAL ASM_PFX(_OnStmSetup)
ASM_PFX(_OnStmSetup):
# STM setup callback: temporarily enable NXE (mirroring SmiEntry.S), call
# SmmStmSetup(), restore the saved XD-disable state, then rsm.
#
# Check XD disable bit
#
xorq %r8, %r8
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz StmXdDone1
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movq %rdx, %r8 # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L13
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L13:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone1:
pushq %r8 # saved MISC_ENABLE high half (0 if XD unsupported)
subq $0x20, %rsp # shadow space for the call
call ASM_PFX(SmmStmSetup)
# BUGFIX(review): was "addq 0x20, %rsp" — without the '$' this is a memory
# operand in AT&T syntax (adds the qword at absolute address 0x20 to %rsp),
# corrupting the stack pointer instead of releasing the 32-byte shadow
# space.  Compare the correct form in _OnStmTeardown below.
addq $0x20, %rsp
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz L14
popq %rdx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L14
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L14:
rsm
ASM_GLOBAL ASM_PFX(_OnStmTeardown)
ASM_PFX(_OnStmTeardown):
# STM teardown callback: same XD save/enable/restore pattern around a call
# to SmmStmTeardown(), then rsm.
#
# Check XD disable bit
#
xorq %r8, %r8
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz StmXdDone2
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
movq %rdx, %r8 # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L15
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L15:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
StmXdDone2:
pushq %r8 # saved MISC_ENABLE high half (0 if XD unsupported)
subq $0x20, %rsp # shadow space for the call
call ASM_PFX(SmmStmTeardown)
addq $0x20, %rsp
movabsq $ASM_PFX(gStmXdSupported), %rax
movb (%rax), %al
cmpb $0, %al
jz L16
popq %rdx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L16
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L16:
rsm