Mirror of https://github.com/acidanthera/audk.git
Sync EDKII BaseTools to BaseTools project r2093.

git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@11057 6f19259b-4bc3-4df7-8a09-765794883524

commit 6780eef1f9, parent 5460c4bbc5
Binary files not shown (26 files).
@@ -364,7 +364,7 @@
     $(MAKE_FILE)

 <Command.MSFT, Command.INTEL>
-    "$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
+    "$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
     "$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
     "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
@@ -384,7 +384,7 @@
     $(MAKE_FILE)

 <Command.MSFT, Command.INTEL>
-    "$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
+    "$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
     "$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
     "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
File diff suppressed because it is too large
@@ -20,6 +20,7 @@ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
 #include "fat.h"
 #include "mbr.h"
 #include "EfiUtilityMsgs.h"
+#include "ParseInf.h"

 #define DEBUG_WARN  0x1
 #define DEBUG_ERROR 0x2
@@ -845,7 +846,9 @@ main (
   BOOLEAN ProcessMbr;   // -m
   BOOLEAN DoParse;      // -p SrcImage or -g SrcImage DstImage
   BOOLEAN Verbose;      // -v
+  UINT64  LogLevel;
+  EFI_STATUS EfiStatus;

   SrcImage = DstImage = NULL;
   ForcePatch = FALSE;
   ProcessMbr = FALSE;
@@ -886,6 +889,23 @@ main (
       ProcessMbr = TRUE;
     } else if (strcmp (*argv, "-v") == 0 || strcmp (*argv, "--verbose") == 0) {
       Verbose = TRUE;
+    } else if ((stricmp (*argv, "-d") == 0) || (stricmp (*argv, "--debug") == 0)) {
+      argc--; argv++;
+      if (argc < 1) {
+        Usage ();
+        return -1;
+      }
+      EfiStatus = AsciiStringToUint64 (*argv, FALSE, &LogLevel);
+      if (EFI_ERROR (EfiStatus)) {
+        Error (NULL, 0, 1003, "Invalid option value", "%s = %s", "--debug", *argv);
+        return 1;
+      }
+      if (LogLevel > 9) {
+        Error (NULL, 0, 1003, "Invalid option value", "Debug Level range is 0-9, current input level is %d", (int) LogLevel);
+        return 1;
+      }
+      SetPrintLevel (LogLevel);
+      DebugMsg (NULL, 0, 9, "Debug Mode Set", "Debug Output Mode Level %s is set!", *argv);
     } else {
       Usage ();
       return -1;
@@ -105,23 +105,22 @@ ConvertVersionInfo (
 /*++
 Routine Description:

-  This function converts GUID string to GUID
+  This function split version to major version and minor version

 Arguments:

   Str      - String representing in form XX.XX
-  MajorVer - The major vertion
-  MinorVer - The minor vertion
+  MajorVer - The major version
+  MinorVer - The minor version

 Returns:

-  EFI_SUCCESS - The fuction completed successfully.
+  EFI_SUCCESS - The function completed successfully.

 --*/
 {
   CHAR8    StrPtr[40];
   CHAR8    *Token;
-  UINTN    Length;
   unsigned Major;
   unsigned Minor;

@@ -135,10 +134,9 @@ Returns:
     Token = strtok (NULL, ".");
   }

-  Length = strlen (StrPtr);
   sscanf (
     StrPtr,
-    "%01x%02x",
+    "%02d%02d",
     &Major,
     &Minor
     );
@@ -235,10 +235,13 @@ ProcessBsOrMbr (
   }

   //Process Floppy Disk
-  OutputFile = fopen(OutputInfo->PhysicalPath, "w");
+  OutputFile = fopen(OutputInfo->PhysicalPath, "r+");
   if (OutputFile == NULL) {
-    return ErrorFileReadWrite;
+    OutputFile = fopen(OutputInfo->PhysicalPath, "w");
+    if (OutputFile == NULL) {
+      return ErrorFileReadWrite;
+    }
   }

   if (OutputInfo->Type != PathFile) {
     if (ProcessMbr) {
@@ -149,13 +149,13 @@ typedef INT64 INTN;
 //
 // Processor specific defines
 //
-#define MAX_BIT     0x8000000000000000
-#define MAX_2_BITS  0xC000000000000000
+#define MAX_BIT     0x8000000000000000ULL
+#define MAX_2_BITS  0xC000000000000000ULL

 //
 // Maximum legal Itanium-based address
 //
-#define MAX_ADDRESS 0xFFFFFFFFFFFFFFFF
+#define MAX_ADDRESS 0xFFFFFFFFFFFFFFFFULL

 //
 // Modifier to ensure that all protocol member functions and EFI intrinsics
@@ -1,10 +1,10 @@
 ## @file
 #
 # The makefile can be invoked with
 # ARCH = x86_64 or x64 for EM64T build
 # ARCH = ia32 or IA32 for IA32 build
 # ARCH = ia64 or IA64 for IA64 build
 #
 # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
 # This program and the accompanying materials
 # are licensed and made available under the terms and conditions of the BSD License
@@ -12,52 +12,56 @@
 # http://opensource.org/licenses/bsd-license.php
 #
 # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
 # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

 ARCH ?= IA32

 CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
 LINUX:=$(findstring Linux, $(shell uname -s))
 DARWIN:=$(findstring Darwin, $(shell uname -s))

 CC = gcc
 CXX = g++
 AS = gcc
 AR = ar
 LD = ld
 LINKER ?= $(CC)
 ifeq ($(ARCH), IA32)
 ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
 endif

 ifeq ($(ARCH), X64)
 ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
 endif

 INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
 CPPFLAGS = $(INCLUDE)
 CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
 LFLAGS =

+ifeq ($(ARCH), IA32)
 #
-# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
-# to x86_64. So make sure tools match uname -m
+# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns i386, but gcc defaults
+# to x86_64. So make sure tools match uname -m. You can manually have a 64-bit kernel on Snow Leopard,
+# so only do this if uname -m returns i386.
 #
 uname_s = $(shell uname -s)
 ifeq ($(uname_s),Darwin)
   CFLAGS += -arch i386
   CPPFLAGS += -arch i386
   LFLAGS += -arch i386
 endif
+endif
+

 .PHONY: all
 .PHONY: install
 .PHONY: clean

 all:

 $(MAKEROOT)/libs:
	mkdir $(MAKEROOT)/libs

 $(MAKEROOT)/bin:
	mkdir $(MAKEROOT)/bin
@@ -7,12 +7,12 @@
 # http://opensource.org/licenses/bsd-license.php
 #
 # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
 # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

 include $(MAKEROOT)/Makefiles/header.makefile

 LIBRARY = $(MAKEROOT)/libs/lib$(LIBNAME).a

 all: $(MAKEROOT)/libs $(LIBRARY)

 include $(MAKEROOT)/Makefiles/footer.makefile
@@ -189,7 +189,12 @@ class WorkspaceAutoGen(AutoGen):
             #
             Pa.CollectPlatformDynamicPcds()
             self.AutoGenObjectList.append(Pa)
+
+        #
+        # Check PCDs token value conflict in each DEC file.
+        #
+        self._CheckAllPcdsTokenValueConflict()

         self._BuildDir = None
         self._FvDir = None
         self._MakeFileDir = None
@@ -248,6 +253,75 @@ class WorkspaceAutoGen(AutoGen):
         # BuildCommand should be all the same. So just get one from platform AutoGen
         self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
         return self._BuildCommand

+    ## Check the PCDs token value conflict in each DEC file.
+    #
+    # Will cause build break and raise error message while two PCDs conflict.
+    #
+    # @return None
+    #
+    def _CheckAllPcdsTokenValueConflict(self):
+        if len(self.BuildDatabase.WorkspaceDb.PackageList) >= 1:
+            for Package in self.BuildDatabase.WorkspaceDb.PackageList:
+                PcdList = Package.Pcds.values()
+                PcdList.sort(lambda x, y: cmp(x.TokenValue, y.TokenValue))
+                Count = 0
+                while (Count < len(PcdList) - 1) :
+                    Item = PcdList[Count]
+                    ItemNext = PcdList[Count + 1]
+                    #
+                    # Make sure in the same token space the TokenValue should be unique
+                    #
+                    if (Item.TokenValue == ItemNext.TokenValue):
+                        SameTokenValuePcdList = []
+                        SameTokenValuePcdList.append(Item)
+                        SameTokenValuePcdList.append(ItemNext)
+                        RemainPcdListLength = len(PcdList) - Count - 2
+                        for ValueSameCount in range(RemainPcdListLength):
+                            if PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue == Item.TokenValue:
+                                SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
+                            else:
+                                break;
+                        #
+                        # Sort same token value PCD list with TokenGuid and TokenCName
+                        #
+                        SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+                        SameTokenValuePcdListCount = 0
+                        while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
+                            TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
+                            TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
+
+                            if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
+                                EdkLogger.error(
+                                            'build',
+                                            FORMAT_INVALID,
+                                            "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
+                                            % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
+                                            ExtraData=None
+                                            )
+                            SameTokenValuePcdListCount += 1
+                        Count += SameTokenValuePcdListCount
+                    Count += 1
+
+                PcdList = Package.Pcds.values()
+                PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+                Count = 0
+                while (Count < len(PcdList) - 1) :
+                    Item = PcdList[Count]
+                    ItemNext = PcdList[Count + 1]
+                    #
+                    # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
+                    #
+                    if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (Item.TokenValue != ItemNext.TokenValue):
+                        EdkLogger.error(
+                                    'build',
+                                    FORMAT_INVALID,
+                                    "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
+                                    % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
+                                    ExtraData=None
+                                    )
+                    Count += 1
+

     ## Create makefile for the platform and modules in it
     #
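The two passes above are easier to see in isolation: sort once by TokenValue to catch two different PCDs sharing a value inside one token space, then sort by name to catch one PCD declared with two different values. A minimal, hypothetical sketch of that idea (simple named tuples instead of the build database's PCD objects):

    from collections import namedtuple

    # Hypothetical, simplified stand-in for the PCD declarations found in a DEC package.
    PcdEntry = namedtuple('PcdEntry', ['TokenSpaceGuidCName', 'TokenCName', 'TokenValue'])

    def check_token_value_conflicts(pcd_list):
        """Report PCDs that share a TokenValue inside the same token space,
        and PCDs declared twice with different TokenValues."""
        errors = []

        # Rule 1: within one token space a TokenValue must be unique.
        by_value = sorted(pcd_list, key=lambda p: (p.TokenSpaceGuidCName, p.TokenValue))
        for prev, cur in zip(by_value, by_value[1:]):
            if (prev.TokenSpaceGuidCName == cur.TokenSpaceGuidCName and
                    prev.TokenValue == cur.TokenValue and
                    prev.TokenCName != cur.TokenCName):
                errors.append("TokenValue %s of %s.%s conflicts with %s.%s"
                              % (cur.TokenValue, prev.TokenSpaceGuidCName, prev.TokenCName,
                                 cur.TokenSpaceGuidCName, cur.TokenCName))

        # Rule 2: one TokenSpaceGuidCName.TokenCName must always carry the same TokenValue.
        by_name = sorted(pcd_list, key=lambda p: (p.TokenSpaceGuidCName, p.TokenCName))
        for prev, cur in zip(by_name, by_name[1:]):
            if (prev.TokenSpaceGuidCName == cur.TokenSpaceGuidCName and
                    prev.TokenCName == cur.TokenCName and
                    prev.TokenValue != cur.TokenValue):
                errors.append("PCD %s.%s is declared with two TokenValues: %s and %s"
                              % (cur.TokenSpaceGuidCName, cur.TokenCName,
                                 prev.TokenValue, cur.TokenValue))
        return errors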
@@ -306,7 +380,27 @@ class PlatformAutoGen(AutoGen):
     #
     _DynaPcdList_ = []
     _NonDynaPcdList_ = []
+
+    #
+    # The priority list while override build option
+    #
+    PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+                "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+                "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+                "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+                "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+                "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+                "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+                "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
+                "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+                "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+                "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
+                "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
+                "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+                "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
+                "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
+                "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)

     ## The real constructor of PlatformAutoGen
     #
     # This method is not supposed to be called by users of PlatformAutoGen. It's
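Each PrioList key is a five-digit mask over TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE: a digit is 1 when that field is spelled out and 0 when it is the * wildcard, and the table maps the mask to a priority (16 highest, 1 lowest). A small stand-alone sketch of the lookup, mirroring the CalculatePriorityValue method added later in this commit; the option keys used in the assertions are made up for illustration:

    # Same table as PrioList above, keyed by the wildcard mask.
    PRIORITY = {"0x11111": 16, "0x01111": 15, "0x10111": 14, "0x00111": 13,
                "0x11011": 12, "0x01011": 11, "0x10011": 10, "0x00011": 9,
                "0x11101": 8,  "0x01101": 7,  "0x10101": 6,  "0x00101": 5,
                "0x11001": 4,  "0x01001": 3,  "0x10001": 2,  "0x00001": 1}

    def calculate_priority(key):
        """key is TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE, e.g. '*_*_IA32_CC_FLAGS'.
        The attribute field is always explicit in the keys PrioList covers."""
        target, toolchain, arch, commandtype, attr = key.split('_')
        value = 0x11111
        # Every '*' wildcard clears the matching digit of 0x11111.
        for field, mask in zip((target, toolchain, arch, commandtype, attr),
                               (0x01111, 0x10111, 0x11011, 0x11101, 0x11110)):
            if field == "*":
                value &= mask
        return PRIORITY["0x%05x" % value]

    # A fully explicit key wins; an arch-specific key outranks an all-wildcard one.
    assert calculate_priority("DEBUG_GCC44_IA32_CC_FLAGS") == 16
    assert calculate_priority("*_*_IA32_CC_FLAGS") > calculate_priority("*_*_*_CC_FLAGS")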
@ -481,12 +575,12 @@ class PlatformAutoGen(AutoGen):
|
||||
UnicodePcdArray = []
|
||||
HiiPcdArray = []
|
||||
OtherPcdArray = []
|
||||
VpdPcdDict = {}
|
||||
VpdFile = VpdInfoFile.VpdInfoFile()
|
||||
NeedProcessVpdMapFile = False
|
||||
|
||||
if (self.Workspace.ArchList[-1] == self.Arch):
|
||||
for Pcd in self._DynamicPcdList:
|
||||
|
||||
# just pick the a value to determine whether is unicode string type
|
||||
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
||||
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||
@ -500,32 +594,47 @@ class PlatformAutoGen(AutoGen):
|
||||
HiiPcdArray.append(Pcd)
|
||||
else:
|
||||
OtherPcdArray.append(Pcd)
|
||||
|
||||
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
|
||||
#
|
||||
# Fix the optional data of VPD PCD.
|
||||
#
|
||||
if (Pcd.DatumType.strip() != "VOID*"):
|
||||
if Sku.DefaultValue == '':
|
||||
Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
|
||||
Pcd.MaxDatumSize = None
|
||||
else:
|
||||
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
|
||||
File=self.MetaFile,
|
||||
ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
|
||||
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
|
||||
|
||||
VpdFile.Add(Pcd, Sku.VpdOffset)
|
||||
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
|
||||
NeedProcessVpdMapFile = True
|
||||
VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
|
||||
|
||||
PlatformPcds = self.Platform.Pcds.keys()
|
||||
PlatformPcds.sort()
|
||||
#
|
||||
# Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
|
||||
#
|
||||
for PcdKey in PlatformPcds:
|
||||
Pcd = self.Platform.Pcds[PcdKey]
|
||||
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||
Pcd = VpdPcdDict[PcdKey]
|
||||
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
||||
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||
#
|
||||
# Fix the optional data of VPD PCD.
|
||||
#
|
||||
if (Pcd.DatumType.strip() != "VOID*"):
|
||||
if Sku.DefaultValue == '':
|
||||
Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
|
||||
Pcd.MaxDatumSize = None
|
||||
else:
|
||||
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
|
||||
File=self.MetaFile,
|
||||
ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
|
||||
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
|
||||
|
||||
VpdFile.Add(Pcd, Sku.VpdOffset)
|
||||
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
|
||||
NeedProcessVpdMapFile = True
|
||||
if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
|
||||
EdkLogger.error("Build", FILE_NOT_FOUND, \
|
||||
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
|
||||
|
||||
|
||||
#
|
||||
# Fix the PCDs define in VPD PCD section that never referenced by module.
|
||||
# An example is PCD for signature usage.
|
||||
#
|
||||
for DscPcd in self.Platform.Pcds:
|
||||
#
|
||||
for DscPcd in PlatformPcds:
|
||||
DscPcdEntry = self.Platform.Pcds[DscPcd]
|
||||
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
|
||||
@ -614,7 +723,7 @@ class PlatformAutoGen(AutoGen):
|
||||
# just pick the a value to determine whether is unicode string type
|
||||
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
||||
if Sku.VpdOffset == "*":
|
||||
Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
|
||||
Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
|
||||
else:
|
||||
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
|
||||
|
||||
@ -693,7 +802,7 @@ class PlatformAutoGen(AutoGen):
|
||||
if "FLAGS" in self.ToolDefinition["MAKE"]:
|
||||
NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
|
||||
if NewOption != '':
|
||||
self._BuildCommand += SplitOption(NewOption)
|
||||
self._BuildCommand += SplitOption(NewOption)
|
||||
return self._BuildCommand
|
||||
|
||||
## Get tool chain definition
|
||||
@ -1220,16 +1329,86 @@ class PlatformAutoGen(AutoGen):
|
||||
EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))
|
||||
return LibraryList
|
||||
|
||||
## Calculate the priority value of the build option
|
||||
#
|
||||
# @param Key Build option definition contain: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
|
||||
#
|
||||
# @retval Value Priority value based on the priority list.
|
||||
#
|
||||
def CalculatePriorityValue(self, Key):
|
||||
Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
|
||||
PriorityValue = 0x11111
|
||||
if Target == "*":
|
||||
PriorityValue &= 0x01111
|
||||
if ToolChain == "*":
|
||||
PriorityValue &= 0x10111
|
||||
if Arch == "*":
|
||||
PriorityValue &= 0x11011
|
||||
if CommandType == "*":
|
||||
PriorityValue &= 0x11101
|
||||
if Attr == "*":
|
||||
PriorityValue &= 0x11110
|
||||
|
||||
return self.PrioList["0x%0.5x"%PriorityValue]
|
||||
|
||||
|
||||
## Expand * in build option key
|
||||
#
|
||||
# @param Options Options to be expanded
|
||||
#
|
||||
# @retval options Options expanded
|
||||
#
|
||||
#
|
||||
def _ExpandBuildOption(self, Options, ModuleStyle=None):
|
||||
BuildOptions = {}
|
||||
FamilyMatch = False
|
||||
FamilyIsNull = True
|
||||
|
||||
OverrideList = {}
|
||||
#
|
||||
# Construct a list contain the build options which need override.
|
||||
#
|
||||
for Key in Options:
|
||||
#
|
||||
# Key[0] -- tool family
|
||||
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
|
||||
#
|
||||
if Key[0] == self.BuildRuleFamily :
|
||||
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
|
||||
if Target == self.BuildTarget or Target == "*":
|
||||
if ToolChain == self.ToolChain or ToolChain == "*":
|
||||
if Arch == self.Arch or Arch == "*":
|
||||
if Options[Key].startswith("="):
|
||||
if OverrideList.get(Key[1]) != None:
|
||||
OverrideList.pop(Key[1])
|
||||
OverrideList[Key[1]] = Options[Key]
|
||||
|
||||
#
|
||||
# Use the highest priority value.
|
||||
#
|
||||
if (len(OverrideList) >= 2):
|
||||
KeyList = OverrideList.keys()
|
||||
for Index in range(len(KeyList)):
|
||||
NowKey = KeyList[Index]
|
||||
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
|
||||
for Index1 in range(len(KeyList) - Index - 1):
|
||||
NextKey = KeyList[Index1 + Index + 1]
|
||||
#
|
||||
# Compare two Key, if one is included by another, choose the higher priority one
|
||||
#
|
||||
Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
|
||||
if Target1 == Target2 or Target1 == "*" or Target2 == "*":
|
||||
if ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*":
|
||||
if Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*":
|
||||
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
|
||||
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
|
||||
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
|
||||
if Options.get((self.BuildRuleFamily, NextKey)) != None:
|
||||
Options.pop((self.BuildRuleFamily, NextKey))
|
||||
else:
|
||||
if Options.get((self.BuildRuleFamily, NowKey)) != None:
|
||||
Options.pop((self.BuildRuleFamily, NowKey))
|
||||
|
||||
|
||||
for Key in Options:
|
||||
if ModuleStyle != None and len (Key) > 2:
|
||||
# Check Module style is EDK or EDKII.
|
||||
|
@ -1860,8 +1860,10 @@ def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuff
|
||||
|
||||
IncList = [Info.MetaFile.Dir]
|
||||
# Get all files under [Sources] section in inf file for EDK-II module
|
||||
EDK2Module = True
|
||||
SrcList = [F for F in Info.SourceFileList]
|
||||
if Info.AutoGenVersion < 0x00010005:
|
||||
EDK2Module = False
|
||||
# Get all files under the module directory for EDK-I module
|
||||
Cwd = os.getcwd()
|
||||
os.chdir(Info.MetaFile.Dir)
|
||||
@ -1883,7 +1885,7 @@ def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuff
|
||||
CompatibleMode = False
|
||||
|
||||
#
|
||||
# -s is a temporary option dedicated for building .UNI files with ISO 639-2 lanauge codes of EDK Shell in EDK2
|
||||
# -s is a temporary option dedicated for building .UNI files with ISO 639-2 language codes of EDK Shell in EDK2
|
||||
#
|
||||
if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
|
||||
if CompatibleMode:
|
||||
@@ -1894,7 +1896,12 @@ def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuff
     else:
         ShellMode = False

-    Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer)
+    #RFC4646 is only for EDKII modules and ISO639-2 for EDK modules
+    if EDK2Module:
+        FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages]
+    else:
+        FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages]
+    Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo)
    if CompatibleMode or UniGenCFlag:
        AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
        AutoGenC.Append(Code)
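FilterInfo is consumed further down by GetFilteredLanguage(), which keeps only the .UNI languages the platform asked for: exact matches first, then the first language sharing the primary tag, with 'en' as the fallback. A simplified, hypothetical rendering of that selection (it ignores the ISO 639-2 three-letter conversion the real code performs through LangConvTable):

    def filter_uni_languages(uni_languages, platform_filter):
        """Keep the exact matches, otherwise the first .UNI language sharing the
        primary tag, and fall back to 'en' when a requested tag is absent."""
        if not platform_filter:              # empty filter means "keep everything"
            return list(uni_languages)

        selected = []
        for wanted in platform_filter:
            if wanted in uni_languages:
                if wanted not in selected:
                    selected.append(wanted)
                continue
            primary = wanted.split('-')[0].lower()
            for candidate in uni_languages:
                if candidate.split('-')[0].lower() == primary:
                    if candidate not in selected:
                        selected.append(candidate)
                    break
            else:
                # Requested language not in the UNI file at all: use the default tag.
                if 'en' not in selected:
                    selected.append('en')
        return selected

    # e.g. a platform asking for ['en-US', 'fr-FR'] against a UNI file defining
    # ['en-US', 'zh-Hans'] keeps 'en-US' and falls back to 'en' for French.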
@ -284,6 +284,65 @@ def CreateCFileStringValue(Value):
|
||||
|
||||
return Str
|
||||
|
||||
## GetFilteredLanguage
|
||||
#
|
||||
# apply get best language rules to the UNI language code list
|
||||
#
|
||||
# @param UniLanguageList: language code definition list in *.UNI file
|
||||
# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
|
||||
#
|
||||
# @retval UniLanguageListFiltered: the filtered language code
|
||||
#
|
||||
def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
|
||||
UniLanguageListFiltered = []
|
||||
# if filter list is empty, then consider there is no filter
|
||||
if LanguageFilterList == []:
|
||||
UniLanguageListFiltered = UniLanguageList
|
||||
return UniLanguageListFiltered
|
||||
for Language in LanguageFilterList:
|
||||
# first check for exact match
|
||||
if Language in UniLanguageList:
|
||||
if Language not in UniLanguageListFiltered:
|
||||
UniLanguageListFiltered += [Language]
|
||||
# find the first one with the same/equivalent primary tag
|
||||
else:
|
||||
if Language.find('-') != -1:
|
||||
PrimaryTag = Language[0:Language.find('-')].lower()
|
||||
else:
|
||||
PrimaryTag = Language
|
||||
|
||||
if len(PrimaryTag) == 3:
|
||||
PrimaryTag = LangConvTable.get(PrimaryTag)
|
||||
|
||||
for UniLanguage in UniLanguageList:
|
||||
if UniLanguage.find('-') != -1:
|
||||
UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
|
||||
else:
|
||||
UniLanguagePrimaryTag = UniLanguage
|
||||
|
||||
if len(UniLanguagePrimaryTag) == 3:
|
||||
UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
|
||||
|
||||
if PrimaryTag == UniLanguagePrimaryTag:
|
||||
if UniLanguage not in UniLanguageListFiltered:
|
||||
UniLanguageListFiltered += [UniLanguage]
|
||||
break
|
||||
else:
|
||||
# Here is rule 3 for "get best language"
|
||||
# If tag is not listed in the Unicode file, the default ("en") tag should be used for that language
|
||||
# for better processing, find the one that best suit for it.
|
||||
DefaultTag = 'en'
|
||||
if DefaultTag not in UniLanguageListFiltered:
|
||||
# check whether language code with primary code equivalent with DefaultTag already in the list, if so, use that
|
||||
for UniLanguage in UniLanguageList:
|
||||
if UniLanguage.startswith('en-') or UniLanguage.startswith('eng-'):
|
||||
if UniLanguage not in UniLanguageListFiltered:
|
||||
UniLanguageListFiltered += [UniLanguage]
|
||||
break
|
||||
else:
|
||||
UniLanguageListFiltered += [DefaultTag]
|
||||
return UniLanguageListFiltered
|
||||
|
||||
|
||||
## Create content of .c file
|
||||
#
|
||||
@ -293,10 +352,11 @@ def CreateCFileStringValue(Value):
|
||||
# @param UniObjectClass A UniObjectClass instance
|
||||
# @param IsCompatibleMode Compatible mode
|
||||
# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
|
||||
# @param FilterInfo Platform language filter information
|
||||
#
|
||||
# @retval Str: A string of .c file content
|
||||
#
|
||||
def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer=None):
|
||||
def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
|
||||
#
|
||||
# Init array length
|
||||
#
|
||||
@ -304,13 +364,29 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer=
|
||||
Str = ''
|
||||
Offset = 0
|
||||
|
||||
EDK2Module = FilterInfo[0]
|
||||
if EDK2Module:
|
||||
LanguageFilterList = FilterInfo[1]
|
||||
else:
|
||||
# EDK module is using ISO639-2 format filter, convert to the RFC4646 format
|
||||
LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
|
||||
|
||||
UniLanguageList = []
|
||||
for IndexI in range(len(UniObjectClass.LanguageDef)):
|
||||
UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
|
||||
|
||||
UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
|
||||
|
||||
|
||||
#
|
||||
# Create lines for each language's strings
|
||||
#
|
||||
for IndexI in range(len(UniObjectClass.LanguageDef)):
|
||||
Language = UniObjectClass.LanguageDef[IndexI][0]
|
||||
LangPrintName = UniObjectClass.LanguageDef[IndexI][1]
|
||||
|
||||
if Language not in UniLanguageListFiltered:
|
||||
continue
|
||||
|
||||
StringBuffer = StringIO()
|
||||
StrStringValue = ''
|
||||
ArrayLength = 0
|
||||
@ -428,13 +504,14 @@ def CreateCFileEnd():
|
||||
# @param BaseName: The basename of strings
|
||||
# @param UniObjectClass A UniObjectClass instance
|
||||
# @param IsCompatibleMode Compatible Mode
|
||||
# @param FilterInfo Platform language filter information
|
||||
#
|
||||
# @retval CFile: A string of complete .c file
|
||||
# @retval CFile: A string of complete .c file
|
||||
#
|
||||
def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode):
|
||||
def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
|
||||
CFile = ''
|
||||
#CFile = WriteLine(CFile, CreateCFileHeader())
|
||||
CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode))
|
||||
CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo))
|
||||
CFile = WriteLine(CFile, CreateCFileEnd())
|
||||
return CFile
|
||||
|
||||
@ -518,7 +595,7 @@ def SearchString(UniObjectClass, FileList, IsCompatibleMode):
|
||||
# This function is used for UEFI2.1 spec
|
||||
#
|
||||
#
|
||||
def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None):
|
||||
def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
|
||||
Status = True
|
||||
ErrorMessage = ''
|
||||
|
||||
@ -540,9 +617,9 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski
|
||||
HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
|
||||
CFile = None
|
||||
if IsCompatibleMode or UniGenCFlag:
|
||||
CFile = CreateCFile(BaseName, Uni, IsCompatibleMode)
|
||||
CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
|
||||
if UniGenBinBuffer:
|
||||
CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer)
|
||||
CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
|
||||
|
||||
return HFile, CFile
|
||||
|
||||
|
@@ -17,6 +17,7 @@ import os
 import StringIO
 import StringTable as st
 import array
+import re

 from struct import *
 import Common.EdkLogger as EdkLogger
@@ -338,8 +339,27 @@ class GenVPD :
             line = line.rstrip(os.linesep)

             # Skip the comment line
             if (not line.startswith("#")) and len(line) > 1 :
-                self.FileLinesList[count] = line.split('|')
+                #
+                # Enhanced for support "|" character in the string.
+                #
+                ValueList = ['', '', '', '']
+
+                ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
+                PtrValue = ValueRe.findall(line)
+
+                ValueUpdateFlag = False
+
+                if len(PtrValue) >= 1:
+                    line = re.sub(ValueRe, '', line)
+                    ValueUpdateFlag = True
+
+                TokenList = line.split('|')
+                ValueList[0:len(TokenList)] = TokenList
+
+                if ValueUpdateFlag:
+                    ValueList[3] = PtrValue[0]
+                self.FileLinesList[count] = ValueList
                 # Store the line number
                 self.FileLinesList[count].append(str(count+1))
             elif len(line) <= 1 :
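The regex guard above exists because a VOID* value such as L"a|b" would otherwise be cut in half by the plain split on '|'; the same pattern reappears in the AnalyzePcdData helpers added to Common/Misc.py in this commit. A small hedged demonstration with a made-up VPD line:

    import re

    def split_vpd_line(line):
        """Split a VPD PCD line on '|' without breaking a quoted value that
        itself contains '|' (illustrative input, not a real platform PCD)."""
        fields = ['', '', '', '']
        quoted = re.compile(r'\s*L?\".*\|.*\"\s*$')   # same pattern the parser uses
        match = quoted.findall(line)
        if match:
            line = re.sub(quoted, '', line)           # strip the risky value first
        tokens = line.split('|')
        fields[0:len(tokens)] = tokens
        if match:
            fields[3] = match[0]                      # and restore it afterwards
        return [f.strip() for f in fields]

    print(split_vpd_line('gTokenSpaceGuid.PcdExample|*|8|L"a|b"'))
    # -> ['gTokenSpaceGuid.PcdExample', '*', '8', 'L"a|b"']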
@ -524,10 +544,8 @@ class GenVPD :
|
||||
if lenOfUnfixedList != 0 :
|
||||
countOfUnfixedList = 0
|
||||
while(countOfUnfixedList < lenOfUnfixedList) :
|
||||
#needFixPcdCName, needFixPcdOffset, needFixPcdSize, needFixPcdValue, needFixUnpackValue = self.PcdUnknownOffsetList[countOfUnfixedList][0:6]
|
||||
eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
|
||||
needFixPcdSize = eachUnfixedPcd.PcdBinSize
|
||||
needFixPcdOffset = eachUnfixedPcd.PcdOffset
|
||||
# Not been fixed
|
||||
if eachUnfixedPcd.PcdOffset == '*' :
|
||||
# The offset un-fixed pcd can write into this free space
|
||||
@@ -546,18 +564,16 @@ class GenVPD :
                                 FixOffsetSizeListCount += 1

                             # Decrease the un-fixed pcd offset list's length
                             countOfUnfixedList += 1
                             lenOfUnfixedList -= 1

                             # Modify the last offset value
                             LastOffset += needFixPcdSize
-                            continue
                         else :
-                            # It can not insert into those two pcds, need to check stiil has other space can store it.
+                            # It can not insert into those two pcds, need to check still has other space can store it.
                             LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdBinSize
                             FixOffsetSizeListCount += 1
                             break
                     else :
                         continue
                     break

             # Set the FixOffsetSizeListCount = lenOfList for quit the loop
             else :
                 FixOffsetSizeListCount = lenOfList
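Taken together, this loop behaves like a first-fit allocator: each PCD whose offset was given as '*' is dropped into the next gap between fixed-offset PCDs that is large enough, and anything left over goes after the last fixed entry. A compact, hypothetical sketch of that placement (plain (offset, size) tuples rather than BPDG's PCD objects):

    def place_unfixed_pcds(fixed, unfixed):
        """fixed:   list of (offset, size) already sorted by offset.
        unfixed: list of sizes whose offset in the VPD file was '*'.
        Returns the chosen (offset, size) pairs for the unfixed entries (first fit)."""
        placed = []
        remaining = list(unfixed)
        last_end = 0
        for offset, size in fixed + [(None, None)]:      # trailing sentinel = open space
            gap = (offset - last_end) if offset is not None else float('inf')
            for need in list(remaining):
                if need <= gap:
                    placed.append((last_end, need))
                    last_end += need
                    gap -= need
                    remaining.remove(need)
            last_end = offset + size if offset is not None else last_end
        return placed

    # Two fixed PCDs at 0x00..0x08 and 0x10..0x18 leave an 8-byte gap; a 4-byte and
    # an 8-byte '*' PCD get offsets 0x08 and 0x18 respectively.
    print(place_unfixed_pcds([(0x0, 0x8), (0x10, 0x8)], [4, 8]))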
@ -22,6 +22,7 @@ TAB_EQUAL_SPLIT = '='
|
||||
TAB_VALUE_SPLIT = '|'
|
||||
TAB_COMMA_SPLIT = ','
|
||||
TAB_SPACE_SPLIT = ' '
|
||||
TAB_SEMI_COLON_SPLIT = ';'
|
||||
TAB_SECTION_START = '['
|
||||
TAB_SECTION_END = ']'
|
||||
TAB_OPTION_START = '<'
|
||||
@ -353,6 +354,8 @@ TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
|
||||
TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
|
||||
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
|
||||
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
|
||||
TAB_DSC_DEFINES_RFC_LANGUAGES = 'RFC_LANGUAGES'
|
||||
TAB_DSC_DEFINES_ISO_LANGUAGES = 'ISO_LANGUAGES'
|
||||
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
|
||||
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
|
||||
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
|
||||
|
@ -1,7 +1,7 @@
|
||||
## @file
|
||||
# Common routines used by all tools
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
|
||||
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
@ -28,7 +28,7 @@ from UserList import UserList
|
||||
|
||||
from Common import EdkLogger as EdkLogger
|
||||
from Common import GlobalData as GlobalData
|
||||
|
||||
from DataType import *
|
||||
from BuildToolError import *
|
||||
|
||||
## Regular expression used to find out place holders in string template
|
||||
@ -1166,6 +1166,93 @@ def ParseConsoleLog(Filename):
|
||||
Opr.close()
|
||||
Opw.close()
|
||||
|
||||
## AnalyzePcdData
|
||||
#
|
||||
# Analyze the pcd Value, Datum type and TokenNumber.
|
||||
# Used to avoid split issue while the value string contain "|" character
|
||||
#
|
||||
# @param[in] Setting: A String contain value/datum type/token number information;
|
||||
#
|
||||
# @retval ValueList: A List contain value, datum type and toke number.
|
||||
#
|
||||
def AnalyzePcdData(Setting):
|
||||
ValueList = ['', '', '']
|
||||
|
||||
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
|
||||
PtrValue = ValueRe.findall(Setting)
|
||||
|
||||
ValueUpdateFlag = False
|
||||
|
||||
if len(PtrValue) >= 1:
|
||||
Setting = re.sub(ValueRe, '', Setting)
|
||||
ValueUpdateFlag = True
|
||||
|
||||
TokenList = Setting.split(TAB_VALUE_SPLIT)
|
||||
ValueList[0:len(TokenList)] = TokenList
|
||||
|
||||
if ValueUpdateFlag:
|
||||
ValueList[0] = PtrValue[0]
|
||||
|
||||
return ValueList
|
||||
|
||||
## AnalyzeHiiPcdData
|
||||
#
|
||||
# Analyze the pcd Value, variable name, variable Guid and variable offset.
|
||||
# Used to avoid split issue while the value string contain "|" character
|
||||
#
|
||||
# @param[in] Setting: A String contain VariableName, VariableGuid, VariableOffset, DefaultValue information;
|
||||
#
|
||||
# @retval ValueList: A List contaian VariableName, VariableGuid, VariableOffset, DefaultValue.
|
||||
#
|
||||
def AnalyzeHiiPcdData(Setting):
|
||||
ValueList = ['', '', '', '']
|
||||
|
||||
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
|
||||
PtrValue = ValueRe.findall(Setting)
|
||||
|
||||
ValueUpdateFlag = False
|
||||
|
||||
if len(PtrValue) >= 1:
|
||||
Setting = re.sub(ValueRe, '', Setting)
|
||||
ValueUpdateFlag = True
|
||||
|
||||
TokenList = Setting.split(TAB_VALUE_SPLIT)
|
||||
ValueList[0:len(TokenList)] = TokenList
|
||||
|
||||
if ValueUpdateFlag:
|
||||
ValueList[0] = PtrValue[0]
|
||||
|
||||
return ValueList
|
||||
|
||||
## AnalyzeVpdPcdData
|
||||
#
|
||||
# Analyze the vpd pcd Value, Datum type and TokenNumber.
|
||||
# Used to avoid split issue while the value string contain "|" character
|
||||
#
|
||||
# @param[in] Setting: A String contain value/datum type/token number information;
|
||||
#
|
||||
# @retval ValueList: A List contain value, datum type and toke number.
|
||||
#
|
||||
def AnalyzeVpdPcdData(Setting):
|
||||
ValueList = ['', '', '']
|
||||
|
||||
ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
|
||||
PtrValue = ValueRe.findall(Setting)
|
||||
|
||||
ValueUpdateFlag = False
|
||||
|
||||
if len(PtrValue) >= 1:
|
||||
Setting = re.sub(ValueRe, '', Setting)
|
||||
ValueUpdateFlag = True
|
||||
|
||||
TokenList = Setting.split(TAB_VALUE_SPLIT)
|
||||
ValueList[0:len(TokenList)] = TokenList
|
||||
|
||||
if ValueUpdateFlag:
|
||||
ValueList[2] = PtrValue[0]
|
||||
|
||||
return ValueList
|
||||
|
||||
## check format of PCD value against its the datum type
|
||||
#
|
||||
# For PCD value setting
|
||||
|
@ -18,70 +18,6 @@ from String import *
|
||||
from CommonDataClass.DataClass import *
|
||||
from DataType import *
|
||||
|
||||
## ParseContent
|
||||
#
|
||||
# Parse content of a DSC/INF/DEC file
|
||||
#
|
||||
def ParseContent(Lines, ):
|
||||
for Line in Lines:
|
||||
LineNo = LineNo + 1
|
||||
#
|
||||
# Remove comments at tail and remove spaces again
|
||||
#
|
||||
Line = CleanString(Line)
|
||||
if Line == '':
|
||||
continue
|
||||
|
||||
#
|
||||
# Find a new section tab
|
||||
# First insert previous section items
|
||||
# And then parse the content of the new section
|
||||
#
|
||||
if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
|
||||
#
|
||||
# Insert items data of previous section
|
||||
#
|
||||
self.InsertSectionItemsIntoDatabase(FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
|
||||
#
|
||||
# Parse the new section
|
||||
#
|
||||
SectionItemList = []
|
||||
ArchList = []
|
||||
ThirdList = []
|
||||
|
||||
LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
|
||||
for Item in LineList:
|
||||
ItemList = GetSplitValueList(Item, TAB_SPLIT)
|
||||
CurrentSection = ItemList[0]
|
||||
if CurrentSection.upper() not in self.KeyList:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
ItemList.append('')
|
||||
ItemList.append('')
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
else:
|
||||
if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo)
|
||||
ArchList.append(ItemList[1].upper())
|
||||
ThirdList.append(ItemList[2])
|
||||
|
||||
continue
|
||||
|
||||
#
|
||||
# Not in any defined section
|
||||
#
|
||||
if CurrentSection == TAB_UNKNOWN:
|
||||
ErrorMsg = "%s is not in any defined section" % Line
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo)
|
||||
|
||||
#
|
||||
# Add a section item
|
||||
#
|
||||
SectionItemList.append([Line, LineNo])
|
||||
# End of parse
|
||||
#End of For
|
||||
|
||||
|
||||
## ParseDefineMacro
|
||||
#
|
||||
# Search whole table to find all defined Macro and replaced them with the real values
|
||||
@ -940,4 +876,4 @@ def GenMetaDatSectionItem(Key, Value, List):
|
||||
if Key not in List:
|
||||
List[Key] = [Value]
|
||||
else:
|
||||
List[Key].append(Value)
|
||||
List[Key].append(Value)
|
||||
|
@ -135,7 +135,9 @@ class VpdInfoFile:
|
||||
fd.write(FILE_COMMENT_TEMPLATE)
|
||||
|
||||
# write each of PCD in VPD type
|
||||
for Pcd in self._VpdArray.keys():
|
||||
Pcds = self._VpdArray.keys()
|
||||
Pcds.sort()
|
||||
for Pcd in Pcds:
|
||||
for Offset in self._VpdArray[Pcd]:
|
||||
PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
|
||||
if PcdValue == "" :
|
||||
|
@@ -565,9 +565,9 @@ class FdfParser:
         self.Rewind()


-    ## PreprocessIncludeFile() method
+    ## PreprocessConditionalStatement() method
     #
-    #   Preprocess file contents, replace !include statements with file contents.
+    #   Preprocess conditional statement.
     #   In the end, rewind the file buffer pointer to the beginning
     #
     #   @param  self        The object pointer
@ -1264,6 +1264,12 @@ class FdfParser:
|
||||
raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
while self.__GetNextWord():
|
||||
# handle the SET statement
|
||||
if self.__Token == 'SET':
|
||||
self.__UndoToken()
|
||||
self.__GetSetStatement(None)
|
||||
continue
|
||||
|
||||
Macro = self.__Token
|
||||
|
||||
if not self.__IsToken("="):
|
||||
@ -1489,7 +1495,7 @@ class FdfParser:
|
||||
|
||||
for Item in Obj.BlockSizeList:
|
||||
if Item[0] == None or Item[1] == None:
|
||||
raise Warning("expected block statement for Fd Section", self.FileName, self.CurrentLineNumber)
|
||||
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
## __GetBlockStatement() method
|
||||
#
|
||||
@ -1508,7 +1514,7 @@ class FdfParser:
|
||||
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
if not self.__GetNextHexNumber() and not self.__GetNextDecimalNumber():
|
||||
raise Warning("expected Hex block size", self.FileName, self.CurrentLineNumber)
|
||||
raise Warning("expected Hex or Integer block size", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
BlockSize = self.__Token
|
||||
BlockSizePcd = None
|
||||
@ -1609,7 +1615,8 @@ class FdfParser:
|
||||
raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
|
||||
Value += self.__SkippedChars
|
||||
|
||||
Obj.SetVarDict[PcdPair] = Value
|
||||
if Obj:
|
||||
Obj.SetVarDict[PcdPair] = Value
|
||||
self.Profile.PcdDict[PcdPair] = Value
|
||||
return True
|
||||
|
||||
@ -1904,7 +1911,8 @@ class FdfParser:
|
||||
|
||||
self.__GetAddressStatements(FvObj)
|
||||
|
||||
self.__GetBlockStatement(FvObj)
|
||||
while self.__GetBlockStatement(FvObj):
|
||||
pass
|
||||
|
||||
self.__GetSetStatements(FvObj)
|
||||
|
||||
|
@ -161,6 +161,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
||||
#
|
||||
|
||||
self.__InfParse__(Dict)
|
||||
|
||||
#
|
||||
# Allow binary type module not specify override rule in FDF file.
|
||||
#
|
||||
if len(self.BinFileList) >0 and not self.InDsc:
|
||||
if self.Rule == None or self.Rule == "":
|
||||
self.Rule = "BINARY"
|
||||
|
||||
#
|
||||
# Get the rule of how to generate Ffs file
|
||||
#
|
||||
|
@ -351,16 +351,14 @@ class GenFds :
|
||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper())
|
||||
if FvObj != None:
|
||||
Buffer = StringIO.StringIO()
|
||||
# Get FV base Address
|
||||
FvObj.AddToBuffer(Buffer, None, GenFds.GetFvBlockSize(FvObj))
|
||||
FvObj.AddToBuffer(Buffer)
|
||||
Buffer.close()
|
||||
return
|
||||
elif GenFds.OnlyGenerateThisFv == None:
|
||||
for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
Buffer = StringIO.StringIO('')
|
||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName]
|
||||
# Get FV base Address
|
||||
FvObj.AddToBuffer(Buffer, None, GenFds.GetFvBlockSize(FvObj))
|
||||
FvObj.AddToBuffer(Buffer)
|
||||
Buffer.close()
|
||||
|
||||
if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None:
|
||||
@ -453,7 +451,12 @@ class GenFds :
|
||||
TotalSizeValue = long(FvSpaceInfo[1], 0)
|
||||
UsedSizeValue = long(FvSpaceInfo[2], 0)
|
||||
FreeSizeValue = long(FvSpaceInfo[3], 0)
|
||||
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.') + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free')
|
||||
if UsedSizeValue == TotalSizeValue:
|
||||
Percentage = '100'
|
||||
else:
|
||||
Percentage = str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.')
|
||||
|
||||
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free')
|
||||
|
||||
## PreprocessImage()
|
||||
#
|
||||
|
@ -1,7 +1,7 @@
|
||||
## @file
|
||||
# process FD Region generation
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
|
||||
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
||||
#
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
@ -62,9 +62,6 @@ class Region(RegionClassObject):
|
||||
#
|
||||
# Get Fv from FvDict
|
||||
#
|
||||
RegionBlockSize = self.BlockSizeOfRegion(BlockSizeList)
|
||||
RegionBlockNum = self.BlockNumOfRegion(RegionBlockSize)
|
||||
|
||||
self.FvAddress = int(BaseAddress, 16) + self.Offset
|
||||
FvBaseAddress = '0x%X' %self.FvAddress
|
||||
FvOffset = 0
|
||||
@ -95,13 +92,7 @@ class Region(RegionClassObject):
|
||||
#
|
||||
# Call GenFv tool
|
||||
#
|
||||
BlockSize = RegionBlockSize
|
||||
BlockNum = RegionBlockNum
|
||||
if FvObj.BlockSizeList != []:
|
||||
if FvObj.BlockSizeList[0][0] != None:
|
||||
BlockSize = FvObj.BlockSizeList[0][0]
|
||||
if FvObj.BlockSizeList[0][1] != None:
|
||||
BlockNum = FvObj.BlockSizeList[0][1]
|
||||
self.BlockInfoOfRegion(BlockSizeList, FvObj)
|
||||
self.FvAddress = self.FvAddress + FvOffset
|
||||
FvAlignValue = self.GetFvAlignValue(FvObj.FvAlignment)
|
||||
if self.FvAddress % FvAlignValue != 0:
|
||||
@ -109,6 +100,8 @@ class Region(RegionClassObject):
|
||||
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
|
||||
FvBuffer = StringIO.StringIO('')
|
||||
FvBaseAddress = '0x%X' %self.FvAddress
|
||||
BlockSize = None
|
||||
BlockNum = None
|
||||
FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
|
||||
if FvBuffer.len > Size:
|
||||
FvBuffer.close()
|
||||
@ -288,38 +281,74 @@ class Region(RegionClassObject):
|
||||
|
||||
AlignValue = int(Str)*Granu
|
||||
return AlignValue
|
||||
|
||||
## BlockSizeOfRegion()
|
||||
#
|
||||
# @param BlockSizeList List of block information
|
||||
# @retval int Block size of region
|
||||
# @param FvObj The object for FV
|
||||
#
|
||||
def BlockSizeOfRegion(self, BlockSizeList):
|
||||
Offset = 0x00
|
||||
BlockSize = 0
|
||||
for item in BlockSizeList:
|
||||
Offset = Offset + item[0] * item[1]
|
||||
GenFdsGlobalVariable.VerboseLogger ("Offset = 0x%X" %Offset)
|
||||
GenFdsGlobalVariable.VerboseLogger ("self.Offset 0x%X" %self.Offset)
|
||||
def BlockInfoOfRegion(self, BlockSizeList, FvObj):
|
||||
Start = 0
|
||||
End = 0
|
||||
RemindingSize = self.Size
|
||||
ExpectedList = []
|
||||
for (BlockSize, BlockNum, pcd) in BlockSizeList:
|
||||
End = Start + BlockSize * BlockNum
|
||||
# region not started yet
|
||||
if self.Offset >= End:
|
||||
Start = End
|
||||
continue
|
||||
# region located in current blocks
|
||||
else:
|
||||
# region ended within current blocks
|
||||
if self.Offset + self.Size <= End:
|
||||
ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1)/BlockSize))
|
||||
break
|
||||
# region not ended yet
|
||||
else:
|
||||
# region not started in middle of current blocks
|
||||
if self.Offset <= Start:
|
||||
UsedBlockNum = BlockNum
|
||||
# region started in middle of current blocks
|
||||
else:
|
||||
UsedBlockNum = (End - self.Offset)/BlockSize
|
||||
Start = End
|
||||
ExpectedList.append((BlockSize, UsedBlockNum))
|
||||
RemindingSize -= BlockSize * UsedBlockNum
|
||||
|
||||
if FvObj.BlockSizeList == []:
|
||||
FvObj.BlockSizeList = ExpectedList
|
||||
else:
|
||||
# first check whether FvObj.BlockSizeList items have only "BlockSize" or "NumBlocks",
|
||||
# if so, use ExpectedList
|
||||
for Item in FvObj.BlockSizeList:
|
||||
if Item[0] == None or Item[1] == None:
|
||||
FvObj.BlockSizeList = ExpectedList
|
||||
break
|
||||
# make sure region size is no smaller than the summed block size in FV
|
||||
Sum = 0
|
||||
for Item in FvObj.BlockSizeList:
|
||||
Sum += Item[0] * Item[1]
|
||||
if self.Size < Sum:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x "
|
||||
%(FvObj.UiFvName, Sum, self.Size))
|
||||
# check whether the BlockStatements in FV section is appropriate
|
||||
ExpectedListData = ''
|
||||
for Item in ExpectedList:
|
||||
ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t"%Item
|
||||
Index = 0
|
||||
for Item in FvObj.BlockSizeList:
|
||||
if Item[0] != ExpectedList[Index][0]:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
|
||||
%FvObj.UiFvName, ExtraData = ExpectedListData)
|
||||
elif Item[1] != ExpectedList[Index][1]:
|
||||
if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1):
|
||||
break;
|
||||
else:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
|
||||
%FvObj.UiFvName, ExtraData = ExpectedListData)
|
||||
else:
|
||||
Index += 1
|
||||
|
||||
if self.Offset < Offset :
|
||||
if Offset - self.Offset < self.Size:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR,
|
||||
"Region at Offset 0x%X can NOT fit into Block array with BlockSize %X" \
|
||||
% (self.Offset, item[0]))
|
||||
BlockSize = item[0]
|
||||
GenFdsGlobalVariable.VerboseLogger ("BlockSize = %X" %BlockSize)
|
||||
return BlockSize
|
||||
return BlockSize
|
||||
|
||||
## BlockNumOfRegion()
|
||||
#
|
||||
# @param BlockSize block size of region
|
||||
# @retval int Block number of region
|
||||
#
|
||||
def BlockNumOfRegion (self, BlockSize):
|
||||
if BlockSize == 0 :
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "Region: %s is not in the FD address scope!" % self.Offset)
|
||||
BlockNum = self.Size / BlockSize
|
||||
GenFdsGlobalVariable.VerboseLogger ("BlockNum = 0x%X" %BlockNum)
|
||||
return BlockNum
|
||||
|
||||
|
||||
|
@ -1209,7 +1209,19 @@ class DecParser(MetaFileParser):
|
||||
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
|
||||
File=self.MetaFile, Line=self._LineIndex+1)
|
||||
|
||||
ValueList = GetSplitValueList(TokenList[1])
|
||||
|
||||
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
|
||||
PtrValue = ValueRe.findall(TokenList[1])
|
||||
|
||||
# Has VOID* type string, may contain "|" character in the string.
|
||||
if len(PtrValue) != 0:
|
||||
ptrValueList = re.sub(ValueRe, '', TokenList[1])
|
||||
ValueList = GetSplitValueList(ptrValueList)
|
||||
ValueList[0] = PtrValue[0]
|
||||
else:
|
||||
ValueList = GetSplitValueList(TokenList[1])
|
||||
|
||||
|
||||
# check if there's enough datum information given
|
||||
if len(ValueList) != 3:
|
||||
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
|
||||
|
@ -74,6 +74,8 @@ class DscBuildData(PlatformBuildClassObject):
|
||||
TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName",
|
||||
TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress",
|
||||
TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress",
|
||||
#TAB_DSC_DEFINES_RFC_LANGUAGES : "_RFCLanguages",
|
||||
#TAB_DSC_DEFINES_ISO_LANGUAGES : "_ISOLanguages",
|
||||
}
|
||||
|
||||
# used to compose dummy library class name for those forced library instances
|
||||
@ -140,6 +142,8 @@ class DscBuildData(PlatformBuildClassObject):
|
||||
self._Pcds = None
|
||||
self._BuildOptions = None
|
||||
self._LoadFixAddress = None
|
||||
self._RFCLanguages = None
|
||||
self._ISOLanguages = None
|
||||
self._VpdToolGuid = None
|
||||
|
||||
## Get architecture
|
||||
@ -194,6 +198,35 @@ class DscBuildData(PlatformBuildClassObject):
|
||||
self._SkuName = Record[1]
|
||||
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
|
||||
self._LoadFixAddress = Record[1]
|
||||
elif Name == TAB_DSC_DEFINES_RFC_LANGUAGES:
|
||||
if not Record[1] or Record[1][0] != '"' or Record[1][-1] != '"' or len(Record[1]) == 1:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for RFC_LANGUAGES must have double quotes around it, for example: RFC_LANGUAGES = "en-us;zh-hans"',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
LanguageCodes = Record[1][1:-1]
|
||||
if not LanguageCodes:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more RFC4646 format language code must be provided for RFC_LANGUAGES statement',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
LanguageList = GetSplitValueList(LanguageCodes, TAB_SEMI_COLON_SPLIT)
|
||||
# check whether there is empty entries in the list
|
||||
if None in LanguageList:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more empty language code is in RFC_LANGUAGES statement',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
self._RFCLanguages = LanguageList
|
||||
elif Name == TAB_DSC_DEFINES_ISO_LANGUAGES:
|
||||
if not Record[1] or Record[1][0] != '"' or Record[1][-1] != '"' or len(Record[1]) == 1:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for ISO_LANGUAGES must have double quotes around it, for example: ISO_LANGUAGES = "engchn"',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
LanguageCodes = Record[1][1:-1]
|
||||
if not LanguageCodes:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more ISO639-2 format language code must be provided for ISO_LANGUAGES statement',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
if len(LanguageCodes)%3:
|
||||
EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'bad ISO639-2 format for ISO_LANGUAGES',
|
||||
File=self.MetaFile, Line=Record[-1])
|
||||
LanguageList = []
|
||||
for i in range(0, len(LanguageCodes), 3):
|
||||
LanguageList.append(LanguageCodes[i:i+3])
|
||||
self._ISOLanguages = LanguageList
|
||||
elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
|
||||
#
|
||||
# try to convert GUID to a real UUID value to see whether the GUID is format
|
||||
@ -339,6 +372,24 @@ class DscBuildData(PlatformBuildClassObject):
self._LoadFixAddress = ''
return self._LoadFixAddress

## Retrieve RFCLanguage filter
def _GetRFCLanguages(self):
if self._RFCLanguages == None:
if self._Header == None:
self._GetHeaderInfo()
if self._RFCLanguages == None:
self._RFCLanguages = []
return self._RFCLanguages

## Retrieve ISOLanguage filter
def _GetISOLanguages(self):
if self._ISOLanguages == None:
if self._Header == None:
self._GetHeaderInfo()
if self._ISOLanguages == None:
self._ISOLanguages = []
return self._ISOLanguages

## Retrieve the GUID string for VPD tool
def _GetVpdToolGuid(self):
if self._VpdToolGuid == None:
@ -588,13 +639,10 @@ class DscBuildData(PlatformBuildClassObject):
PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '', '']
Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
PcdValue, DatumType, MaxDatumSize = ValueList
PcdValue, DatumType, MaxDatumSize = AnalyzePcdData(Setting)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
@ -622,22 +670,20 @@ class DscBuildData(PlatformBuildClassObject):
# PCD settings for certain ARCH and SKU
#
PcdDict = tdict(True, 4)
PcdSet = set()
PcdList = []
# Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
PcdSet.add((PcdCName, TokenSpaceGuid))
PcdList.append((PcdCName, TokenSpaceGuid))
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '', '']
for PcdCName, TokenSpaceGuid in PcdList:
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
PcdValue, DatumType, MaxDatumSize = ValueList

PcdValue, DatumType, MaxDatumSize = AnalyzePcdData(Setting)

SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', '', PcdValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@ -674,13 +720,10 @@ class DscBuildData(PlatformBuildClassObject):
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '', '', '']
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
VariableName, VariableGuid, VariableOffset, DefaultValue = ValueList
VariableName, VariableGuid, VariableOffset, DefaultValue = AnalyzeHiiPcdData(Setting)
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@ -709,27 +752,24 @@ class DscBuildData(PlatformBuildClassObject):
# PCD settings for certain ARCH and SKU
#
PcdDict = tdict(True, 4)
PcdSet = set()
PcdList = []
# Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
PcdSet.add((PcdCName, TokenSpaceGuid))
PcdList.append((PcdCName, TokenSpaceGuid))
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '', '']
for PcdCName, TokenSpaceGuid in PcdList:
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
#
# For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
# For the Integer & Boolean type, the optional data can only be InitialValue.
# At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype
# until the DEC parser has been called.
#
VpdOffset, MaxDatumSize, InitialValue = ValueList
VpdOffset, MaxDatumSize, InitialValue = AnalyzeVpdPcdData(Setting)

SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
@ -790,6 +830,8 @@ class DscBuildData(PlatformBuildClassObject):
BsBaseAddress = property(_GetBsBaseAddress)
RtBaseAddress = property(_GetRtBaseAddress)
LoadFixAddress = property(_GetLoadFixAddress)
RFCLanguages = property(_GetRFCLanguages)
ISOLanguages = property(_GetISOLanguages)
VpdToolGuid = property(_GetVpdToolGuid)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
@ -1083,7 +1125,6 @@ class DecBuildData(PackageBuildClassObject):
PcdSet.add((PcdCName, TokenSpaceGuid))

for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '', '']
#
# limit the ARCH to self._Arch, if no self._Arch found, tdict
# will automatically turn to 'common' ARCH and try again
@ -1091,9 +1132,9 @@ class DecBuildData(PackageBuildClassObject):
Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
DefaultValue, DatumType, TokenNumber = ValueList

DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)

Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
@ -1920,11 +1961,11 @@ class InfBuildData(ModuleBuildClassObject):
def _GetPcd(self, Type):
Pcds = {}
PcdDict = tdict(True, 4)
PcdSet = set()
PcdList = []
RecordList = self._RawData[Type, self._Arch, self._Platform]
for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Dummy1, LineNo in RecordList:
PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo)
PcdSet.add((PcdCName, TokenSpaceGuid))
PcdList.append((PcdCName, TokenSpaceGuid))
# get the guid value
if TokenSpaceGuid not in self.Guids:
Value = GuidValue(TokenSpaceGuid, self.Packages)
@ -1936,13 +1977,11 @@ class InfBuildData(ModuleBuildClassObject):
self.Guids[TokenSpaceGuid] = Value

# resolve PCD type, value, datum info, etc. by getting its definition from package
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '']
for PcdCName, TokenSpaceGuid in PcdList:
Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
ValueList = AnalyzePcdData(Setting)
DefaultValue = ValueList[0]
Pcd = PcdClassObject(
PcdCName,
@ -1980,6 +2019,64 @@ class InfBuildData(ModuleBuildClassObject):
PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType]
Pcd.Type = PcdType
Pcd.TokenValue = PcdInPackage.TokenValue

#
# Check whether the token value exist or not.
#
if Pcd.TokenValue == None or Pcd.TokenValue == "":
EdkLogger.error(
'build',
FORMAT_INVALID,
"No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo,
ExtraData=None
)
#
# Check hexadecimal token value length and format.
#
if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
if len(Pcd.TokenValue) < 3 or len(Pcd.TokenValue) > 10:
EdkLogger.error(
'build',
FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo,
ExtraData=None
)
try:
int (Pcd.TokenValue, 16)
except:
EdkLogger.error(
'build',
FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo,
ExtraData=None
)

#
# Check decimal token value length and format.
#
else:
try:
TokenValueInt = int (Pcd.TokenValue, 10)
if (TokenValueInt < 0 or TokenValueInt > 4294967295):
EdkLogger.error(
'build',
FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo,
ExtraData=None
)
except:
EdkLogger.error(
'build',
FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo,
ExtraData=None
)

Pcd.DatumType = PcdInPackage.DatumType
Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
Pcd.InfDefaultValue = Pcd.DefaultValue
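The TokenValue checks added above accept either a hexadecimal value written as "0x"/"0X" plus 1 to 8 hex digits (string length 3-10, i.e. at most 0xFFFFFFFF) or a decimal value in the range 0 - 4294967295. A small standalone sketch of the same rule follows; the function name is illustrative, and the real code reports violations through EdkLogger.error() instead of returning a flag.

    def token_value_is_valid(token_value):
        # Empty or missing token values are rejected outright.
        if not token_value:
            return False
        if token_value.lower().startswith("0x"):
            # "0x" plus 1 to 8 hex digits keeps the value within 32 bits.
            if len(token_value) < 3 or len(token_value) > 10:
                return False
            try:
                int(token_value, 16)
            except ValueError:
                return False
            return True
        # Otherwise the value must be a decimal number within 32 bits.
        try:
            return 0 <= int(token_value, 10) <= 4294967295
        except ValueError:
            return False

    assert token_value_is_valid("0x00010001")
    assert not token_value_is_valid("0x100000000")   # 11 characters: too long
    assert not token_value_is_valid("4294967296")    # out of the 32-bit range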
@ -1989,12 +2086,13 @@ class InfBuildData(ModuleBuildClassObject):
else:
EdkLogger.error(
'build',
PARSER_ERROR,
FORMAT_INVALID,
"PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile),
File =self.MetaFile, Line=LineNo,
ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])
)
Pcds[PcdCName, TokenSpaceGuid] = Pcd

return Pcds

Arch = property(_GetArch, _SetArch)
@ -999,7 +999,7 @@ class Build():
EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
return True

## Rebase module image and Get function address for the inpug module list.
## Rebase module image and Get function address for the input module list.
#
def _RebaseModule (self, MapBuffer, BaseAddress, ModuleList, AddrIsOffset = True, ModeIsSmm = False):
if ModeIsSmm:
@ -1786,7 +1786,8 @@ def Main():
else:
GlobalData.gIsWindows = False

EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[%s]\n" % platform.platform())
EdkLogger.quiet("Build environment: %s" % platform.platform())
EdkLogger.quiet(time.strftime("Build start time: %H:%M:%S, %b.%d %Y\n", time.localtime()));
ReturnCode = 0
MyBuild = None
try:
@ -1918,8 +1919,9 @@ def Main():
MyBuild.BuildReport.GenerateReport(BuildDuration)
MyBuild.Db.Close()
EdkLogger.SetLevel(EdkLogger.QUIET)
EdkLogger.quiet("\n- %s -\n%s [%s]" % (Conclusion, time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))

EdkLogger.quiet("\n- %s -" % Conclusion)
EdkLogger.quiet(time.strftime("Build end time: %H:%M:%S, %b.%d %Y", time.localtime()))
EdkLogger.quiet("Build total time: %s\n" % BuildDuration)
return ReturnCode

if __name__ == '__main__':