MdePkg/BaseLib: BaseLib for LOONGARCH64 architecture.

REF: https://bugzilla.tianocore.org/show_bug.cgi?id=4053

Add LoongArch LOONGARCH64 BaseLib functions.

Cc: Michael D Kinney <michael.d.kinney@intel.com>
Cc: Liming Gao <gaoliming@byosoft.com.cn>
Cc: Zhiguang Liu <zhiguang.liu@intel.com>

Signed-off-by: Chao Li <lichao@loongson.cn>
Co-authored-by: Baoqi Zhang <zhangbaoqi@loongson.cn>
Reviewed-by: Michael D Kinney <michael.d.kinney@intel.com>
This commit is contained in:
Chao Li 2022-09-14 10:29:15 +08:00 committed by mergify[bot]
parent f0a704f9b5
commit cd24eb578b
12 changed files with 363 additions and 1 deletions

View File

@@ -6,6 +6,7 @@ Copyright (c) 2006 - 2021, Intel Corporation. All rights reserved.<BR>
Portions copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
Copyright (c) Microsoft Corporation.<BR>
Portions Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
Portions Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
@@ -152,6 +153,29 @@ typedef struct {
#endif // defined (MDE_CPU_RISCV64)
#if defined (MDE_CPU_LOONGARCH64)
///
/// The LoongArch architecture context buffer used by SetJump() and LongJump()
///
/// Holds the callee-saved register state captured by SetJump() and restored
/// by LongJump(). The field order and 8-byte offsets (S0 at offset 0 through
/// RA at offset 88) must stay in sync with the RSIZE-scaled offsets used in
/// LoongArch64/SetJumpLongJump.S and LoongArch64/SwitchStack.S.
///
typedef struct {
  UINT64    S0;   ///< Static (callee-saved) register $s0
  UINT64    S1;   ///< Static register $s1
  UINT64    S2;   ///< Static register $s2
  UINT64    S3;   ///< Static register $s3
  UINT64    S4;   ///< Static register $s4
  UINT64    S5;   ///< Static register $s5
  UINT64    S6;   ///< Static register $s6
  UINT64    S7;   ///< Static register $s7
  UINT64    S8;   ///< Static register $s8
  UINT64    SP;   ///< Stack pointer ($sp)
  UINT64    FP;   ///< Frame pointer ($fp)
  UINT64    RA;   ///< Return address ($ra)
} BASE_LIBRARY_JUMP_BUFFER;

#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT 8
#endif // defined (MDE_CPU_LOONGARCH64)
//
// String Services
//

View File

@@ -21,7 +21,7 @@
LIBRARY_CLASS = BaseLib
#
# VALID_ARCHITECTURES = IA32 X64 EBC ARM AARCH64 RISCV64
# VALID_ARCHITECTURES = IA32 X64 EBC ARM AARCH64 RISCV64 LOONGARCH64
#
[Sources]
@@ -402,6 +402,20 @@
RiscV64/RiscVInterrupt.S | GCC
RiscV64/FlushCache.S | GCC
[Sources.LOONGARCH64]
Math64.c
Unaligned.c
LoongArch64/InternalSwitchStack.c
LoongArch64/GetInterruptState.S | GCC
LoongArch64/EnableInterrupts.S | GCC
LoongArch64/DisableInterrupts.S | GCC
LoongArch64/Barrier.S | GCC
LoongArch64/MemoryFence.S | GCC
LoongArch64/CpuBreakpoint.S | GCC
LoongArch64/CpuPause.S | GCC
LoongArch64/SetJumpLongJump.S | GCC
LoongArch64/SwitchStack.S | GCC
[Packages]
MdePkg/MdePkg.dec

View File

@@ -0,0 +1,28 @@
#------------------------------------------------------------------------------
#
# LoongArch Barrier Operations
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(AsmDataBarrierLoongArch)
ASM_GLOBAL ASM_PFX(AsmInstructionBarrierLoongArch)

#
# Data barrier operation for LoongArch.
#
# Completes all outstanding load/store operations before any subsequent
# memory access proceeds (DBAR with hint 0 = full barrier).
# Clobbers: none.
#
ASM_PFX(AsmDataBarrierLoongArch):
  dbar  0                    # full data barrier
  jirl  $zero, $ra, 0        # return to caller

#
# Instruction barrier operation for LoongArch.
#
# Synchronizes the instruction fetch stream so that subsequently fetched
# instructions observe prior memory updates (IBAR with hint 0).
# Clobbers: none.
#
ASM_PFX(AsmInstructionBarrierLoongArch):
  ibar  0                    # instruction fetch barrier
  jirl  $zero, $ra, 0        # return to caller
  .end

View File

@@ -0,0 +1,24 @@
#------------------------------------------------------------------------------
#
# CpuBreakpoint for LoongArch
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(CpuBreakpoint)

#/**
#  Generates a breakpoint on the CPU.
#
#  Generates a breakpoint on the CPU. The breakpoint must be implemented such
#  that code can resume normal execution after the breakpoint.
#
#  Clobbers: none.
#**/
ASM_PFX(CpuBreakpoint):
  break 3                    # raise a breakpoint exception with code 3
                             # NOTE(review): assumes the installed exception
                             # handler recognizes break code 3 and resumes
                             # execution after the instruction -- confirm
  jirl  $zero, $ra, 0        # return to caller once execution resumes
  .end

View File

@@ -0,0 +1,31 @@
#------------------------------------------------------------------------------
#
# CpuPause for LoongArch
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(CpuPause)

#/**
#  Requests CPU to pause for a short period of time.
#
#  Requests CPU to pause for a short period of time. Typically used in MP
#  systems to prevent memory starvation while waiting for a spin lock.
#
#  Clobbers: none.
#**/
ASM_PFX(CpuPause):
  # Eight NOPs provide a short, fixed-length delay; no dedicated
  # pause/yield instruction is used here.
  nop
  nop
  nop
  nop
  nop
  nop
  nop
  nop
  jirl  $zero, $ra, 0        # return to caller
  .end

View File

@@ -0,0 +1,21 @@
#------------------------------------------------------------------------------
#
# LoongArch interrupt disable
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(DisableInterrupts)

#/**
#  Disables CPU interrupts.
#
#  Clears the global Interrupt Enable bit (IE, bit 2) of CSR 0x0 (CRMD).
#  Clobbers: $t0.
#**/
ASM_PFX(DisableInterrupts):
  li.w    $t0, 0x4           # mask selecting CRMD.IE (bit 2)
  csrxchg $zero, $t0, 0x0    # write 0 into the masked bit: IE <- 0
  jirl    $zero, $ra, 0      # return to caller
  .end

View File

@@ -0,0 +1,21 @@
#------------------------------------------------------------------------------
#
# LoongArch interrupt enable
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(EnableInterrupts)

#/**
#  Enables CPU interrupts.
#
#  Sets the global Interrupt Enable bit (IE, bit 2) of CSR 0x0 (CRMD).
#  Clobbers: $t0.
#**/
ASM_PFX(EnableInterrupts):
  li.w    $t0, 0x4           # mask selecting CRMD.IE (bit 2)
  csrxchg $t0, $t0, 0x0      # write 1 into the masked bit: IE <- 1
  jirl    $zero, $ra, 0      # return to caller
  .end

View File

@@ -0,0 +1,35 @@
#------------------------------------------------------------------------------
#
# Get LoongArch interrupt status
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(GetInterruptState)

#/**
#  Retrieves the current CPU interrupt state.
#
#  Returns TRUE means interrupts are currently enabled. Otherwise,
#  returns FALSE.
#
#  @retval TRUE  CPU interrupts are enabled.
#  @retval FALSE CPU interrupts are disabled.
#
#  Result in $a0 (BOOLEAN). Clobbers: $t0, $t1.
#**/
ASM_PFX(GetInterruptState):
  li.w  $t1, 0x4             # mask for CRMD.IE (bit 2)
  csrrd $t0, 0x0             # read CSR 0x0 (CRMD)
  and   $t0, $t0, $t1        # isolate the IE bit
  beqz  $t0, 1f              # IE clear -> return FALSE
  li.w  $a0, 0x1             # interrupts enabled: return TRUE
  b     2f
1:
  li.w  $a0, 0x0             # interrupts disabled: return FALSE
2:
  jirl  $zero, $ra, 0        # return to caller
  .end

View File

@@ -0,0 +1,58 @@
/** @file
SwitchStack() function for LoongArch.
Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#include "BaseLibInternals.h"
//
// Assembly helper (LoongArch64/SwitchStack.S): loads the register context
// from JumpBuffer (RA = entry point, SP = new stack) and transfers control
// to the address in the RA field. Control does not return to the caller.
//
UINTN
EFIAPI
InternalSwitchStackAsm (
  IN BASE_LIBRARY_JUMP_BUFFER  *JumpBuffer
  );
/**
  Transfers control to a function starting with a new stack.

  Transfers control to the function specified by EntryPoint using the
  new stack specified by NewStack and passing in the parameters specified
  by Context1 and Context2.  Context1 and Context2 are optional and may
  be NULL.  The function EntryPoint must never return.

  If EntryPoint is NULL, then ASSERT().
  If NewStack is NULL, then ASSERT().

  @param[in]  EntryPoint  A pointer to function to call with the new stack.
  @param[in]  Context1    A pointer to the context to pass into the EntryPoint
                          function.
  @param[in]  Context2    A pointer to the context to pass into the EntryPoint
                          function.
  @param[in]  NewStack    A pointer to the new stack to use for the EntryPoint
                          function.
  @param[in]  Marker      VA_LIST marker for the variable argument list.
**/
VOID
EFIAPI
InternalSwitchStack (
  IN SWITCH_STACK_ENTRY_POINT  EntryPoint,
  IN VOID                      *Context1 OPTIONAL,
  IN VOID                      *Context2 OPTIONAL,
  IN VOID                      *NewStack,
  IN VA_LIST                   Marker
  )
{
  BASE_LIBRARY_JUMP_BUFFER  Frame;
  VOID                      **ArgSlot;

  //
  // The assembly trampoline "returns" into the entry point, so EntryPoint
  // goes into the RA slot of the context buffer.
  //
  Frame.RA = (UINTN)EntryPoint;

  //
  // Carve one reserved pointer slot plus room for the two context arguments
  // off the top of the new stack.
  //
  Frame.SP = (UINTN)NewStack - sizeof (VOID *) - (sizeof (Context1) + sizeof (Context2));

  //
  // InternalSwitchStackAsm reloads these two slots into $a0/$a1 before
  // jumping to EntryPoint.
  //
  ArgSlot    = (VOID **)(UINTN)Frame.SP;
  ArgSlot[0] = Context1;
  ArgSlot[1] = Context2;

  InternalSwitchStackAsm (&Frame);
}

View File

@@ -0,0 +1,18 @@
#------------------------------------------------------------------------------
#
# MemoryFence() for LoongArch
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(MemoryFence)

#/**
#  Memory fence for LoongArch.
#
#  Tail-calls the data barrier routine in Barrier.S; the DBAR executes there
#  and its jirl returns directly to MemoryFence's caller via $ra.
#
#  Clobbers: none.
#**/
ASM_PFX(MemoryFence):
  # Route the external symbol through ASM_PFX() like every other symbol
  # reference in these sources, so the name still resolves on toolchains
  # whose ASM_PFX() applies a prefix to the definition in Barrier.S.
  b     ASM_PFX(AsmDataBarrierLoongArch)
  .end

View File

@@ -0,0 +1,49 @@
#------------------------------------------------------------------------------
#
# Set/Long jump for LoongArch
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
#define STORE st.d  /* 64-bit register store instruction */
#define LOAD  ld.d  /* 64-bit register load instruction */
#define RSIZE 8     /* 64-bit register size in bytes */

ASM_GLOBAL ASM_PFX(SetJump)
ASM_GLOBAL ASM_PFX(InternalLongJump)

#/**
#  UINTN SetJump (BASE_LIBRARY_JUMP_BUFFER *JumpBuffer /* $a0 */)
#
#  Saves the callee-saved CPU context ($s0-$s8, $sp, $fp, $ra) into the
#  buffer at $a0. Slot order matches BASE_LIBRARY_JUMP_BUFFER exactly.
#  Returns 0 on the direct call; when LongJump() later restores this
#  context, execution resumes at SetJump's call site with the value
#  passed to InternalLongJump in $a0.
#**/
ASM_PFX(SetJump):
  STORE $s0, $a0, RSIZE * 0     # JumpBuffer->S0
  STORE $s1, $a0, RSIZE * 1     # JumpBuffer->S1
  STORE $s2, $a0, RSIZE * 2     # JumpBuffer->S2
  STORE $s3, $a0, RSIZE * 3     # JumpBuffer->S3
  STORE $s4, $a0, RSIZE * 4     # JumpBuffer->S4
  STORE $s5, $a0, RSIZE * 5     # JumpBuffer->S5
  STORE $s6, $a0, RSIZE * 6     # JumpBuffer->S6
  STORE $s7, $a0, RSIZE * 7     # JumpBuffer->S7
  STORE $s8, $a0, RSIZE * 8     # JumpBuffer->S8
  STORE $sp, $a0, RSIZE * 9     # JumpBuffer->SP
  STORE $fp, $a0, RSIZE * 10    # JumpBuffer->FP
  STORE $ra, $a0, RSIZE * 11    # JumpBuffer->RA (resume address)
  li.w  $a0, 0                  # Setjmp return: direct call yields 0
  jirl  $zero, $ra, 0

#/**
#  VOID InternalLongJump (BASE_LIBRARY_JUMP_BUFFER *JumpBuffer /* $a0 */,
#                         UINTN Value /* $a1 */)
#
#  Restores the context saved by SetJump and resumes at the saved $ra with
#  Value as the apparent SetJump return value.
#  NOTE(review): callers are expected to pass a nonzero Value; a zero Value
#  would make the resumed SetJump look like the direct call -- confirm the
#  LongJump() wrapper enforces this.
#**/
ASM_PFX(InternalLongJump):
  LOAD $ra, $a0, RSIZE * 11     # restore resume address first
  LOAD $s0, $a0, RSIZE * 0      # restore S0..S8
  LOAD $s1, $a0, RSIZE * 1
  LOAD $s2, $a0, RSIZE * 2
  LOAD $s3, $a0, RSIZE * 3
  LOAD $s4, $a0, RSIZE * 4
  LOAD $s5, $a0, RSIZE * 5
  LOAD $s6, $a0, RSIZE * 6
  LOAD $s7, $a0, RSIZE * 7
  LOAD $s8, $a0, RSIZE * 8
  LOAD $sp, $a0, RSIZE * 9      # restore the saved stack pointer
  LOAD $fp, $a0, RSIZE * 10     # restore the saved frame pointer
  move $a0, $a1                 # "return" Value at SetJump's call site
  jirl $zero, $ra, 0            # resume after the original SetJump call
  .end

View File

@@ -0,0 +1,39 @@
#------------------------------------------------------------------------------
#
# InternalSwitchStackAsm for LoongArch
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
#define STORE st.d  /* 64-bit register store instruction (unused here) */
#define LOAD  ld.d  /* 64-bit register load instruction */
#define RSIZE 8     /* 64-bit register size in bytes */

ASM_GLOBAL ASM_PFX(InternalSwitchStackAsm)

/**
  This allows the caller to switch the stack and goes to the new entry point

  Loads the BASE_LIBRARY_JUMP_BUFFER at $a0 (slot order matches the struct:
  S0 at offset 0 ... RA at offset RSIZE*11), switches $sp to the new stack,
  reloads Context1/Context2 from the two slots InternalSwitchStack wrote
  there, and jumps to the entry point stored in RA. Does not return.

  NOTE(review): InternalSwitchStack only initializes the RA and SP fields,
  so the S0-S8 and FP values loaded below are indeterminate -- the entry
  point must not depend on them.

  @param JumpBuffer A pointer to CPU context buffer.
**/
ASM_PFX(InternalSwitchStackAsm):
  LOAD $ra, $a0, RSIZE * 11     # RA = EntryPoint (set by InternalSwitchStack)
  LOAD $s0, $a0, RSIZE * 0      # load S0..S8 slots
  LOAD $s1, $a0, RSIZE * 1
  LOAD $s2, $a0, RSIZE * 2
  LOAD $s3, $a0, RSIZE * 3
  LOAD $s4, $a0, RSIZE * 4
  LOAD $s5, $a0, RSIZE * 5
  LOAD $s6, $a0, RSIZE * 6
  LOAD $s7, $a0, RSIZE * 7
  LOAD $s8, $a0, RSIZE * 8
  LOAD $sp, $a0, RSIZE * 9      # switch to the new stack
  LOAD $fp, $a0, RSIZE * 10
  LOAD $a0, $sp, 0              # arg 1 = Context1 (first slot on new stack)
  LOAD $a1, $sp, 8              # arg 2 = Context2 (second slot)
  jirl $zero, $ra, 0            # jump to EntryPoint; never returns here
  .end