Sync EDKII BaseTools to BaseTools project r2042.

git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10850 6f19259b-4bc3-4df7-8a09-765794883524
qhuang8 2010-09-06 01:58:00 +00:00
parent 034ffda8b2
commit e56468c072
57 changed files with 1147 additions and 366 deletions

21 binary files changed (contents not shown).

View File

@ -0,0 +1 @@
RunToolFromSource

View File

@ -295,7 +295,7 @@
$(DEBUG_DIR)(+)$(MODULE_NAME).efi
<Command.MSFT, Command.INTEL, Command.RVCT, Command.ARMGCC>
GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@ -304,7 +304,7 @@
$(OBJCOPY) --only-keep-debug ${src} $(BIN_DIR)(+)$(MODULE_NAME).debug
$(OBJCOPY) --strip-unneeded ${src}
$(OBJCOPY) --add-gnu-debuglink=$(BIN_DIR)(+)$(MODULE_NAME).debug ${src}
GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@ -314,7 +314,7 @@
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) ${src} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
# create symbol file for GDB debug
-$(DSYMUTIL) ${src}
GenFw -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@ -367,12 +367,12 @@
<Command.MSFT, Command.INTEL>
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.GCC>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
[Acpi-Table-Code-File]
<InputFile>
@ -387,18 +387,18 @@
<Command.MSFT, Command.INTEL>
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.GCC>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.XCODE>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi $(GENFW_FLAGS)
[Masm16-Code-File]
@ -438,7 +438,7 @@
$(OUTPUT_DIR)(+)${s_base}.mcb
<Command>
GenFw -o ${dst} -m ${src}
"$(GENFW)" -o ${dst} -m ${src} $(GENFW_FLAGS)
[Microcode-Binary-File]
<InputFile>
@ -451,7 +451,7 @@
$(OUTPUT_DIR)(+)$(MODULE_NAME).bin
<Command>
GenFw -o ${dst} -j $(MICROCODE_BINARY_FILES)
"$(GENFW)" -o ${dst} -j $(MICROCODE_BINARY_FILES) $(GENFW_FLAGS)
-$(CP) ${dst} $(BIN_DIR)
[EFI-Image-File]
@ -518,9 +518,9 @@
$(OUTPUT_DIR)(+)$(MODULE_NAME)hii.lib
<Command.MSFT, Command.INTEL>
GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES)
"$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
"$(RC)" /Fo${dst} $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc
<Command.GCC>
GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES)
"$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
"$(RC)" $(RC_FLAGS) $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc ${dst}

View File

@ -63,10 +63,6 @@ TOOL_CHAIN_TAG = MYTOOLS
# cores or CPUs. Less than 2 means disable multithread build.
MAX_CONCURRENT_THREAD_NUMBER = 1
# MULTIPLE_THREAD BOOLEAN Optional If "Enable", multi-thread is enable for bulding.
# If "Disable", multi-thread is disable for building.
MULTIPLE_THREAD = Disable
# Build rules definition
#
#

View File

@ -86,8 +86,8 @@ DEFINE ICC11_ASM32x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERS
DEFINE ICC11_BINX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
DEFINE ICC11_ASMX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
DEFINE ICC11_BIN64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
DEFINE ICC11_BIN64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
@ -3308,8 +3308,8 @@ RELEASE_XCODE32_ARM_ASM_FLAGS = $(ARCHASM_FLAGS)
*_XCODE32_ARM_PP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -E -x assembler-with-cpp -include $(DEST_DIR_DEBUG)/AutoGen.h
*_XCODE32_ARM_VFRPP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -x c -E -P -DVFRCOMPILE --include $(DEST_DIR_DEBUG)/$(MODULE_NAME)StrDefs.h
DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
####################################################################################
@ -3481,6 +3481,12 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
*_*_*_OPTROM_PATH = EfiRom
*_*_*_OPTROM_FLAGS = -e
##################
# GenFw tool definitions
##################
*_*_*_GENFW_PATH = GenFw
*_*_*_GENFW_FLAGS =
##################
# Asl Compiler definitions
##################
@ -3501,3 +3507,8 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
*_*_*_TIANO_PATH = TianoCompress
*_*_*_TIANO_GUID = A31280AD-481E-41B6-95E8-127F4C984779
##################
# BPDG tool definitions
##################
*_*_*_VPDTOOL_PATH = BPDG
*_*_*_VPDTOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08
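With the BPDG entries above, a platform opts in to VPD PCD processing by naming this tool GUID in its DSC [Defines] section. A rough sketch, assuming the VPD_TOOL_GUID / VPD_FILENAME keys added to DataType.py later in this commit (the file base name is hypothetical):

[Defines]
  VPD_TOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08
  VPD_FILENAME  = MyPlatformVpd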

View File

@ -44,14 +44,11 @@ LFLAGS =
# to x86_64. So make sure tools match uname -m
#
uname_s = $(shell uname -s)
uname_m = $(shell uname -m)
ifeq ($(uname_s),Darwin)
ifeq ($(uname_m),i386)
CFLAGS += -arch i386
CPPFLAGS += -arch i386
LFLAGS += -arch i386
endif
endif
.PHONY: all
.PHONY: install

View File

@ -34,6 +34,7 @@ import Common.GlobalData as GlobalData
from GenFds.FdfParser import *
from CommonDataClass.CommonClass import SkuInfoClass
from Workspace.BuildClassObject import *
import Common.VpdInfoFile as VpdInfoFile
## Regular expression for splitting Dependency Expression stirng into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
@ -244,7 +245,7 @@ class WorkspaceAutoGen(AutoGen):
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
## Create makefile for the platform and mdoules in it
## Create makefile for the platform and modules in it
#
# @param CreateDepsMakeFile Flag indicating if the makefile for
# modules will be created as well
@ -476,9 +477,16 @@ class PlatformAutoGen(AutoGen):
UnicodePcdArray = []
HiiPcdArray = []
OtherPcdArray = []
VpdFile = VpdInfoFile.VpdInfoFile()
NeedProcessVpdMapFile = False
if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
Sku.VpdOffset = Sku.VpdOffset.strip()
PcdValue = Sku.DefaultValue
if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
# if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
@ -488,6 +496,121 @@ class PlatformAutoGen(AutoGen):
HiiPcdArray.append(Pcd)
else:
OtherPcdArray.append(Pcd)
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
#
# Fix the optional data of VPD PCD.
#
if (Pcd.DatumType.strip() != "VOID*"):
if Sku.DefaultValue == '':
Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
Pcd.MaxDatumSize = None
else:
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
File=self.MetaFile,
ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
VpdFile.Add(Pcd, Sku.VpdOffset)
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
NeedProcessVpdMapFile = True
#
# Fix the PCDs define in VPD PCD section that never referenced by module.
# An example is PCD for signature usage.
#
for DscPcd in self.Platform.Pcds:
DscPcdEntry = self.Platform.Pcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
FoundFlag = False
for VpdPcd in VpdFile._VpdArray.keys():
# This PCD has been referenced by module
if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
(VpdPcd.TokenCName == DscPcdEntry.TokenCName):
FoundFlag = True
# Not found, it should be signature
if not FoundFlag :
# just pick the a value to determine whether is unicode string type
Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
Sku.VpdOffset = Sku.VpdOffset.strip()
# Need to iterate DEC pcd information to get the value & datumtype
for eachDec in self.PackageList:
for DecPcd in eachDec.Pcds:
DecPcdEntry = eachDec.Pcds[DecPcd]
if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
(DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
DscPcdEntry.DatumType = DecPcdEntry.DatumType
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
Sku.DefaultValue = DecPcdEntry.DefaultValue
VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
NeedProcessVpdMapFile = True
if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
if VpdFile.GetCount() != 0:
WorkspaceDb = self.BuildDatabase.WorkspaceDb
DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
FvPath = os.path.join(self.BuildDir, "FV")
if not os.path.exists(FvPath):
try:
os.makedirs(FvPath)
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
VpdFileName = self.Platform.VpdFileName
if VpdFileName == None or VpdFileName == "" :
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
else :
VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
VpdFile.Write(VpdFilePath)
# retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
BPDGToolName = None
for ToolDef in self.ToolDefinition.values():
if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
if not ToolDef.has_key("PATH"):
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
BPDGToolName = ToolDef["PATH"]
break
# Call third party GUID BPDG tool.
if BPDGToolName != None:
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
if VpdFileName == None or VpdFileName == "" :
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
else :
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
if os.path.exists(VpdMapFilePath):
VpdFile.Read(VpdMapFilePath)
# Fixup "*" offset
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
if Sku.VpdOffset == "*":
Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
else:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
# Delete the DynamicPcdList At the last time enter into this function
del self._DynamicPcdList[:]
self._DynamicPcdList.extend(UnicodePcdArray)
self._DynamicPcdList.extend(HiiPcdArray)
@ -709,10 +832,14 @@ class PlatformAutoGen(AutoGen):
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
if self._NonDynamicPcdList == None:
self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
def _GetDynamicPcdList(self):
if self._DynamicPcdList == None:
self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
@ -952,6 +1079,10 @@ class PlatformAutoGen(AutoGen):
if FromPcd != None:
if ToPcd.Pending and FromPcd.Type not in [None, '']:
ToPcd.Type = FromPcd.Type
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
if ToPcd.Type.strip() == "DynamicEx":
ToPcd.Type = FromPcd.Type
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
and ToPcd.Type != FromPcd.Type:
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",

View File

@ -1028,7 +1028,9 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
ArraySize = ArraySize / 2;
if ArraySize < (len(Value) + 1):
ArraySize = len(Value) + 1
EdkLogger.error("build", AUTOGEN_ERROR,
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
ExtraData="[%s]" % str(Info))
Value = NewValue + '0 }'
Array = '[%d]' % ArraySize
#
@ -1262,10 +1264,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
VariableHeadValueList = []
Pcd.InitString = 'UNINIT'
if Pcd.Type in ["DynamicVpd", "DynamicExVpd"]:
Pcd.TokenTypeList = ['PCD_TYPE_VPD']
elif Pcd.DatumType == 'VOID*':
if Pcd.DatumType == 'VOID*':
if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
Pcd.TokenTypeList = ['PCD_TYPE_STRING']
else:
Pcd.TokenTypeList = []
elif Pcd.DatumType == 'BOOLEAN':
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
else:
@ -1364,7 +1367,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
if MaxDatumSize > Size:
if MaxDatumSize < Size:
EdkLogger.error("build", AUTOGEN_ERROR,
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
ExtraData="[%s]" % str(Platform))
Size = MaxDatumSize
Dict['STRING_TABLE_LENGTH'].append(Size)
StringTableIndex += 1
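The reversed comparison above turns an undersized declaration into a hard error: a VOID* PCD whose declared MaxDatumSize is smaller than the space its value occupies now stops the build with the "maximum size ... is less than its actual size occupied" message instead of being silently adjusted. As a rough worked example (assuming the usual UCS-2 accounting of two bytes per character plus a terminator), a default value of L"Boot" needs (4 + 1) * 2 = 10 bytes, so a declared maximum size of 8 would now be rejected.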

View File

@ -334,6 +334,7 @@ TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
#
# Dsc Definitions
@ -353,6 +354,8 @@ TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#

View File

@ -719,7 +719,7 @@ class TemplateString(object):
while Template:
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
if not MatchObj:
if MatchEnd < len(Template):
if MatchEnd <= len(Template):
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
TemplateSectionList.append(TemplateSection)
break

View File

@ -296,6 +296,50 @@ def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppSty
return Line
## CleanString2
#
# Split comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
#
# @retval Path Formatted path
#
def CleanString2(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
#
# remove whitespace
#
Line = Line.strip();
#
# Replace R8's comment character
#
if AllowCppStyleComment:
Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
#
# separate comments and statements
#
LineParts = Line.split(CommentCharacter, 1);
#
# remove whitespace again
#
Line = LineParts[0].strip();
if len(LineParts) > 1:
Comment = LineParts[1].strip()
# Remove prefixed and trailing comment characters
Start = 0
End = len(Comment)
while Start < End and Comment.startswith(CommentCharacter, Start, End):
Start += 1
while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
End -= 1
Comment = Comment[Start:End]
Comment = Comment.strip()
else:
Comment = ''
return Line, Comment
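# A minimal usage sketch of CleanString2 (the input line is hypothetical, not taken
# from any tree file):
#
#   Line, Comment = CleanString2('gTokenSpaceGuid.PcdFoo|0x10  ## Hypothetical comment ##')
#   # Line    -> 'gTokenSpaceGuid.PcdFoo|0x10'
#   # Comment -> 'Hypothetical comment'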
## GetMultipleValuesOfKeyFromLines
#
# Parse multiple strings to clean comment and spaces

View File

@ -0,0 +1,259 @@
## @file
#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
import os
import re
import Common.EdkLogger as EdkLogger
import Common.BuildToolError as BuildToolError
import subprocess
FILE_COMMENT_TEMPLATE = \
"""
## @file
#
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
"""
## The class manage VpdInfoFile.
#
# This file contains an ordered (based on position in the DSC file) list of the PCDs specified in the platform description file (DSC). The Value field that will be assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC file) or the DEC file (if not defined in the INF file). This file is used as an input to the BPDG tool.
# Format for this file (using EBNF notation) is:
# <File> :: = [<CommentBlock>]
# [<PcdEntry>]*
# <CommentBlock> ::= ["#" <String> <EOL>]*
# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>
# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID
# <PcdCName> ::= C Variable Name of the PCD
# <Offset> ::= {"*"} {<HexNumber>}
# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}
# <Size> ::= <HexNumber>
# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
# <NonNegativeInt> ::= (0-9)+
# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>
# <DblQuote> ::= 0x22
# <Array> ::= {<CArray>} {<NList>}
# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"
# <NList> ::= <HexNumber> ["," <HexNumber>]*
#
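# A minimal sketch of two resulting <PcdEntry> lines (the PCD names are hypothetical;
# real names come from the platform DSC/DEC files):
#
#   gHypotheticalTokenSpaceGuid.PcdHypotheticalValue|0x100|4|0xFF
#   gHypotheticalTokenSpaceGuid.PcdHypotheticalString|*|0x10|L"VPD"
#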
class VpdInfoFile:
## The mapping dictionary from datum type to size string.
_MAX_SIZE_TYPE = {"BOOLEAN":"1", "UINT8":"1", "UINT16":"2", "UINT32":"4", "UINT64":"8"}
_rVpdPcdLine = None
## Constructor
def __init__(self):
## Dictionary for VPD in following format
#
# Key : PcdClassObject instance.
# @see BuildClassObject.PcdClassObject
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
self._VpdArray = {}
## Add a VPD PCD collected from platform's autogen when building.
#
# @param vpds The list of VPD PCD collected for a platform.
# @see BuildClassObject.PcdClassObject
#
# @param offset integer value for VPD's offset in specific SKU.
#
def Add(self, Vpd, Offset):
if (Vpd == None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
if Vpd.DatumType == "VOID*":
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
else:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
if Vpd not in self._VpdArray.keys():
#
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
#
self._VpdArray[Vpd] = [Offset]
else:
#
# If there is an offset for a specific SKU in dict, then append this offset for other sku to array.
#
self._VpdArray[Vpd].append(Offset)
## Generate VPD PCD information into a text file
#
# If parameter FilePath is invalid, then assert.
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
if not (FilePath != None or len(FilePath) != 0):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
try:
fd = open(FilePath, "w")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
try:
# write file header
fd.write(FILE_COMMENT_TEMPLATE)
# write each of PCD in VPD type
for Pcd in self._VpdArray.keys():
for Offset in self._VpdArray[Pcd]:
PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
if PcdValue == "" :
PcdValue = Pcd.DefaultValue
fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_WRITE_FAILURE,
"Fail to write file %s" % FilePath)
fd.close()
## Read an existing VPD PCD info file.
#
# This routine will read VPD PCD information from existing file and construct
# internal PcdClassObject array.
# This routine could be used by third-party tool to parse VPD info file content.
#
# @param FilePath The full path string for existing VPD PCD info file.
def Read(self, FilePath):
try:
fd = open(FilePath, "r")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
Lines = fd.readlines()
for Line in Lines:
Line = Line.strip()
if len(Line) == 0 or Line.startswith("#"):
continue
#
# the line must follow output format defined in BPDG spec.
#
try:
PcdName, Offset, Size, Value = Line.split("#")[0].split("|")
TokenSpaceName, PcdTokenName = PcdName.split(".")
except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
Found = False
for VpdObject in self._VpdArray.keys():
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip():
if self._VpdArray[VpdObject][0] == "*":
if Offset == "*":
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
self._VpdArray[VpdObject][0] = Offset
Found = True
break
if not Found:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
## Get count of VPD PCD collected from platform's autogen when building.
#
# @return The integer count value
def GetCount(self):
Count = 0
for OffsetList in self._VpdArray.values():
Count += len(OffsetList)
return Count
## Get an offset value for a given VPD PCD
#
# Because BPDG only support one Sku, so only return offset for SKU default.
#
# @param vpd A given VPD PCD
def GetOffset(self, vpd):
if not self._VpdArray.has_key(vpd):
return None
if len(self._VpdArray[vpd]) == 0:
return None
return self._VpdArray[vpd]
## Call external BPDG tool to process VPD file
#
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):
assert ToolPath != None, "Invalid parameter ToolPath"
assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFilePath)
if (VpdFileName == None or VpdFileName == "") :
FileName = os.path.basename(VpdFilePath)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
else :
OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)
try:
PopenObject = subprocess.Popen([ToolPath,
'-o', OutputBinFileName,
'-m', OutputMapFileName,
'-s',
'-f',
'-v',
VpdFilePath],
stdout=subprocess.PIPE,
stderr= subprocess.PIPE)
except Exception, X:
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
(out, error) = PopenObject.communicate()
print out
while PopenObject.returncode == None :
PopenObject.wait()
if PopenObject.returncode != 0:
if PopenObject.returncode != 0:
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
(PopenObject.returncode, str(error)))
return PopenObject.returncode
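For reference, the Popen call above amounts to roughly this command line, with the output names derived from VPD_TOOL_GUID or VPD_FILENAME as in Write() and AutoGen.py (the <name> placeholder is illustrative):

BPDG -o <OutputDir>/<name>.bin -m <OutputDir>/<name>.map -s -f -v <OutputDir>/<name>.txt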

View File

@ -29,6 +29,7 @@ MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
MODEL_FILE_OTHERS = 1099
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
@ -91,6 +92,8 @@ MODEL_META_DATA_NMAKE = 5012
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
MODEL_META_DATA_COMMENT = 5016
MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_EXTERNAL_DEPENDENCY = 10000
@ -103,6 +106,8 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
('MODEL_FILE_DSC', MODEL_FILE_DSC),
('MODEL_FILE_FDF', MODEL_FILE_FDF),
('MODEL_FILE_INC', MODEL_FILE_INC),
('MODEL_FILE_CIF', MODEL_FILE_CIF),
('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
@ -159,7 +164,8 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)
('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
]
## FunctionClass

View File

@ -30,6 +30,7 @@ class Check(object):
# Check all required checkpoints
def Check(self):
self.GeneralCheck()
self.MetaDataFileCheck()
self.DoxygenCheck()
self.IncludeFileCheck()
@ -38,6 +39,29 @@ class Check(object):
self.FunctionLayoutCheck()
self.NamingConventionCheck()
# General Checking
def GeneralCheck(self):
self.GeneralCheckNonAcsii()
# Check whether file has non ACSII char
def GeneralCheckNonAcsii(self):
if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Non-ACSII char in file ...")
SqlCommand = """select ID, FullPath, ExtName from File"""
RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1]).readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
IndexOfChar = 0
for Char in Line:
IndexOfChar += 1
if ord(Char) > 126:
OtherMsg = "File %s has Non-ASCII char at line %s column %s" %(Record[1], IndexOfLine, IndexOfChar)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg = OtherMsg, BelongsToTable = 'File', BelongsToItem = Record[0])
# C Function Layout Checking
def FunctionLayoutCheck(self):
self.FunctionLayoutCheckReturnType()
@ -67,10 +91,12 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout return type ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c', '.h'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutReturnType(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutReturnType(FullName)
# Check whether any optional functional modifiers exist and next to the return type
@ -78,10 +104,12 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout modifier ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c', '.h'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutModifier(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutModifier(FullName)
# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
@ -90,20 +118,26 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function name ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c', '.h'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutName(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutName(FullName)
# Check whether the function prototypes in include files have the same form as function definitions
def FunctionLayoutCheckPrototype(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function prototype ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[PROTOTYPE]" + FullName)
# c.CheckFuncLayoutPrototype(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[PROTOTYPE]" + FullName)
c.CheckFuncLayoutPrototype(FullName)
@ -112,10 +146,12 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function body ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutBody(FullName)
for FullName in EccGlobalData.gCFileList:
c.CheckFuncLayoutBody(FullName)
# Check whether the data declarations is the first code in a module.
@ -125,10 +161,13 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout local variables ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutLocalVariable(FullName)
for FullName in EccGlobalData.gCFileList:
c.CheckFuncLayoutLocalVariable(FullName)
# Check whether no use of STATIC for functions
@ -150,10 +189,12 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration No use C type ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckDeclNoUseCType(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckDeclNoUseCType(FullName)
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
@ -161,10 +202,12 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration argument modifier ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckDeclArgModifier(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckDeclArgModifier(FullName)
# Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
@ -177,10 +220,13 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration enum typedef ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[ENUM]" + FullName)
# c.CheckDeclEnumTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[ENUM]" + FullName)
c.CheckDeclEnumTypedef(FullName)
@ -189,10 +235,13 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration struct typedef ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[STRUCT]" + FullName)
# c.CheckDeclStructTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[STRUCT]" + FullName)
c.CheckDeclStructTypedef(FullName)
@ -223,10 +272,13 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration union typedef ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[UNION]" + FullName)
# c.CheckDeclUnionTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[UNION]" + FullName)
c.CheckDeclUnionTypedef(FullName)
@ -241,10 +293,13 @@ class Check(object):
if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Boolean value ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[BOOLEAN]" + FullName)
# c.CheckBooleanValueComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[BOOLEAN]" + FullName)
c.CheckBooleanValueComparison(FullName)
@ -253,23 +308,31 @@ class Check(object):
if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
# c.CheckNonBooleanValueComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
c.CheckNonBooleanValueComparison(FullName)
# Check whether a comparison of any pointer to zero must be done via the NULL type
def PredicateExpressionCheckComparisonNullType(self):
if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression NULL pointer ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[POINTER]" + FullName)
# c.CheckPointerNullComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[POINTER]" + FullName)
c.CheckPointerNullComparison(FullName)
# Include file checking
def IncludeFileCheck(self):
self.IncludeFileCheckIfndef()
@ -309,10 +372,12 @@ class Check(object):
if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file ifndef ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckHeaderFileIfndef(FullName)
for FullName in EccGlobalData.gHFileList:
MsgList = c.CheckHeaderFileIfndef(FullName)
# Check whether include files NOT contain code or define data variables
@ -320,10 +385,12 @@ class Check(object):
if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file data ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckHeaderFileData(FullName)
for FullName in EccGlobalData.gHFileList:
MsgList = c.CheckHeaderFileData(FullName)
# Doxygen document checking
@ -347,7 +414,8 @@ class Check(object):
MsgList = c.CheckFileHeaderDoxygenComments(FullName)
elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
FullName = os.path.join(Dirpath, F)
if not open(FullName).read().startswith('## @file'):
op = open(FullName).readlines()
if not op[0].startswith('## @file') and op[6].startswith('## @file') and op[7].startswith('## @file'):
SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
for Result in ResultSet:
@ -360,12 +428,15 @@ class Check(object):
if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen function header ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
# Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
def DoxygenCheckCommentDescription(self):
@ -377,10 +448,12 @@ class Check(object):
if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen comment ///< ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
@ -388,10 +461,12 @@ class Check(object):
if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen command ...")
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
if os.path.splitext(F)[1] in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckDoxygenCommand(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckDoxygenCommand(FullName)
# Meta-Data File Processing Checking
@ -556,7 +631,6 @@ class Check(object):
SqlCommand2 = """select Name from File where ID = %s""" %Record[5]
DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]
FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]
print DscFileName, 111, FdfFileName
if DscFileName != FdfFileName:
continue
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):
@ -680,8 +754,8 @@ class Check(object):
SqlCommand = """
select ID from File where FullPath in
(select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
and B.ID = %s)
""" %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile)
and B.ID = %s and (B.Model = %s or B.Model = %s))
""" %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)
TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Tbl in TableSet:
TblName = 'Identifier' + str(Tbl[0])

View File

@ -77,6 +77,8 @@ class Configuration(object):
self.GeneralCheckCarriageReturn = 1
# Check whether the file exists
self.GeneralCheckFileExistence = 1
# Check whether file has non ACSII char
self.GeneralCheckNonAcsii = 1
## Space Checking
self.SpaceCheckAll = 1
@ -235,6 +237,9 @@ class Configuration(object):
# The directory listed here will not be parsed, split with ','
self.SkipDirList = []
# A list for binary file ext name
self.BinaryExtList = []
self.ParseConfig()
def ParseConfig(self):
@ -258,6 +263,8 @@ class Configuration(object):
continue
if List[0] == 'SkipDirList':
List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
if List[0] == 'BinaryExtList':
List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
self.__dict__[List[0]] = List[1]
def ShowMe(self):

View File

@ -171,10 +171,11 @@ class Database(object):
# Insert a record for file
#
FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
IdTable = TableIdentifier(self.Cur)
IdTable.Table = "Identifier%s" % FileID
IdTable.Create()
#
# Insert function of file
#

View File

@ -106,6 +106,8 @@ class Ecc(object):
self.BuildMetaDataFileDatabase()
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)
EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)
## BuildMetaDataFileDatabase
#

View File

@ -21,4 +21,6 @@ gTarget = ''
gConfig = None
gDb = None
gIdentifierTableList = []
gCFileList = []
gHFileList = []
gException = None

View File

@ -19,6 +19,7 @@ ERROR_GENERAL_CHECK_NO_ASM = 1004
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
ERROR_GENERAL_CHECK_NON_ACSII = 1008
ERROR_SPACE_CHECK_ALL = 2000
@ -105,6 +106,7 @@ gEccErrorMessage = {
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",
ERROR_SPACE_CHECK_ALL : "",

View File

@ -49,6 +49,19 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
return IncludeList
## Get the file list
#
# Search table file and find all specific type files
#
def GetFileList(FileModel, Db):
FileList = []
SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)
RecordSet = Db.TblFile.Exec(SqlCommand)
for Record in RecordSet:
FileList.append(Record[0])
return FileList
## Get the table list
#
# Search table file and find all small tables

View File

@ -514,7 +514,9 @@ def CollectSourceCodeDataIntoDB(RootDir):
dirnames.append(Dirname)
for f in filenames:
collector = None
FullName = os.path.normpath(os.path.join(dirpath, f))
model = DataClass.MODEL_FILE_OTHERS
if os.path.splitext(f)[1] in ('.h', '.c'):
EdkLogger.info("Parsing " + FullName)
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
@ -532,6 +534,7 @@ def CollectSourceCodeDataIntoDB(RootDir):
ModifiedTime = os.path.getmtime(FullName)
FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
FileObjList.append(FileObj)
if collector:
collector.CleanFileProfileBuffer()
if len(ParseErrorFileList) > 0:
@ -539,6 +542,7 @@ def CollectSourceCodeDataIntoDB(RootDir):
Db = GetDB()
for file in FileObjList:
if file.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:
Db.InsertOneFile(file)
Db.UpdateIdentifierBelongsToFunction()
@ -552,7 +556,6 @@ def GetTableID(FullFileName, ErrorMsgList = None):
from File
where FullPath like '%s'
""" % FullFileName
ResultSet = Db.TblFile.Exec(SqlStatement)
FileID = -1
@ -567,6 +570,8 @@ def GetTableID(FullFileName, ErrorMsgList = None):
return FileID
def GetIncludeFileList(FullFileName):
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
return []
IFList = IncludeFileListDict.get(FullFileName)
if IFList != None:
return IFList
@ -2301,21 +2306,32 @@ def CheckFileHeaderDoxygenComments(FullFileName):
FileTable = 'Identifier' + str(FileID)
SqlStatement = """ select Value, ID
from %s
where Model = %d and StartLine = 1 and StartColumn = 0
where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0
""" % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
ResultSet = Db.TblFile.Exec(SqlStatement)
if len(ResultSet) == 0:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)
return ErrorMsgList
IsFoundError1 = True
IsFoundError2 = True
IsFoundError3 = True
for Result in ResultSet:
CommentStr = Result[0]
if not CommentStr.startswith('/** @file'):
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, Result[1])
if not CommentStr.endswith('**/'):
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with **/', FileTable, Result[1])
if CommentStr.find('.') == -1:
PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', FileTable, Result[1])
CommentStr = Result[0].strip()
ID = Result[1]
if CommentStr.startswith('/** @file'):
IsFoundError1 = False
if CommentStr.endswith('**/'):
IsFoundError2 = False
if CommentStr.find('.') != -1:
IsFoundError3 = False
if IsFoundError1:
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
if IsFoundError2:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)
if IsFoundError3:
PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period "".""', FileTable, ID)
def CheckFuncHeaderDoxygenComments(FullFileName):
ErrorMsgList = []
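The reworked header check above only requires that a comment starting at line 1, 7 or 8 begins with "/** @file", ends with "**/", and contains at least one period. A header of roughly this shape would satisfy all three checks (a sketch, not taken from the tree):

/** @file
  One-line brief description ending with a period.

  Optional longer description.
**/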

View File

@ -68,6 +68,8 @@ GeneralCheckNoProgma = 1
GeneralCheckCarriageReturn = 1
# Check whether the file exists
GeneralCheckFileExistence = 1
# Check whether file has non ACSII char
GeneralCheckNonAcsii = 1
#
# Space Checking
@ -242,3 +244,6 @@ MetaDataFileCheckModuleFileGuidDuplication = 1
# GotoStatementCheckAll = 0
# SpellingCheckAll = 0
#
# A list for binary file ext name
BinaryExtList = EXE, EFI, FV, ROM, DLL, COM, BMP, GIF, PYD, CMP, BIN, JPG, UNI, RAW, COM2, LIB, DEPEX, SYS, DB

View File

@ -1,5 +1,5 @@
## @file
# Windows makefile for Python tools build.
# Linux makefile for Python tools build.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials

View File

@ -751,7 +751,7 @@ class FdfParser:
raise Warning("Value %s is not a number", self.FileName, Line)
for Profile in AllMacroList:
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
if Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
if Op == None:
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
return False

View File

@ -80,7 +80,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
ErrorCode, ErrorInfo = PathClassObj.Validate()
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
@ -343,7 +343,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if len(PlatformArchList) == 0:
self.InDsc = False
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
ErrorCode, ErrorInfo = PathClassObj.Validate()
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
if len(ArchList) == 1:

View File

@ -172,6 +172,7 @@ def main():
"""call Workspace build create database"""
os.environ["WORKSPACE"] = Workspace
FdfParser.InputMacroDict["WORKSPACE"] = Workspace
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
BuildWorkSpace.InitDatabase()

View File

@ -22,7 +22,7 @@ MODULES=encodings.cp437,encodings.gbk,encodings.utf_16,encodings.utf_8,encodings
BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32
APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe
APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe $(BIN_DIR)\BPDG.exe
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \
@ -46,6 +46,7 @@ COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\String.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\VpdInfoFile.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \
@ -100,6 +101,9 @@ $(BIN_DIR)\GenPatchPcdTable.exe: $(BASE_TOOLS_PATH)\Source\Python\GenPatchPcdTab
$(BIN_DIR)\PatchPcdValue.exe: $(BASE_TOOLS_PATH)\Source\Python\PatchPcdValue\PatchPcdValue.py $(COMMON_PYTHON)
@pushd . & @cd PatchPcdValue & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) PatchPcdValue.py & @popd
$(BIN_DIR)\BPDG.exe: $(BASE_TOOLS_PATH)\Source\Python\BPDG\BPDG.py $(COMMON_PYTHON)
@pushd . & @cd BPDG & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) BPDG.py & @popd
clean:
cleanall:
@del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll

View File

@ -1,7 +1,7 @@
## @file
# This file is used to define each component of the build database
#
# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@ -31,6 +31,7 @@ from Common.BuildToolError import *
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
#
# @var TokenCName: To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
@ -43,7 +44,7 @@ from Common.BuildToolError import *
# @var Phase: To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, GuidValue = None):
def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, IsOverrided = False, GuidValue = None):
self.TokenCName = Name
self.TokenSpaceGuidCName = Guid
self.TokenSpaceGuidValue = GuidValue
@ -55,6 +56,7 @@ class PcdClassObject(object):
self.SkuInfoList = SkuInfoList
self.Phase = "DXE"
self.Pending = False
self.IsOverrided = IsOverrided
## Convert the class to a string
#
@ -73,7 +75,7 @@ class PcdClassObject(object):
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
for Item in self.SkuInfoList.values():
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
Rtn = Rtn + str(self.IsOverrided)
Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
return Rtn

View File

@ -82,6 +82,7 @@ class MetaFileParser(object):
self.MetaFile = FilePath
self._FileDir = os.path.dirname(self.MetaFile)
self._Macros = copy.copy(Macros)
self._Macros["WORKSPACE"] = os.environ["WORKSPACE"]
# for recursive parsing
self._Owner = Owner
@ -490,7 +491,12 @@ class InfParser(MetaFileParser):
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
def _PcdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
if len(ValueList) != 2:
EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
File=self.MetaFile, Line=self._LineIndex+1)
self._ValueList[0:1] = ValueList
if len(TokenList) > 1:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
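The added check enforces that every PCD entry in an INF is written as <TokenSpaceGuidCName>.<PcdCName>: the token before the first '|' must split on '.' into exactly two parts, otherwise the parser raises FORMAT_INVALID. A quick standalone illustration of what passes and what is rejected, using the same '.' separator:

TAB_SPLIT = '.'

def IsValidPcdToken(Token):
    # <TokenSpaceGuidCName>.<PcdCName> must yield exactly two parts.
    return len(Token.split(TAB_SPLIT)) == 2

print(IsValidPcdToken("gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask"))  # True
print(IsValidPcdToken("PcdDebugPropertyMask"))                           # False - token space GUID missing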
@ -564,6 +570,7 @@ class DscParser(MetaFileParser):
# sections which allow "!include" directive
_IncludeAllowedSection = [
TAB_COMMON_DEFINES.upper(),
TAB_LIBRARIES.upper(),
TAB_LIBRARY_CLASSES.upper(),
TAB_SKUIDS.upper(),
@ -648,7 +655,25 @@ class DscParser(MetaFileParser):
continue
# file private macros
elif Line.upper().startswith('DEFINE '):
self._MacroParser()
(Name, Value) = self._MacroParser()
# Make a macro defined in the DSC [Defines] section also
# available to the FDF file.
if self._SectionName == TAB_COMMON_DEFINES.upper():
self._LastItem = self._Store(
MODEL_META_DATA_GLOBAL_DEFINE,
Name,
Value,
'',
'COMMON',
'COMMON',
self._Owner,
self._From,
self._LineIndex+1,
-1,
self._LineIndex+1,
-1,
self._Enabled
)
continue
elif Line.upper().startswith('EDK_GLOBAL '):
(Name, Value) = self._MacroParser()
@ -715,6 +740,22 @@ class DscParser(MetaFileParser):
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
TokenList[1] = NormPath(TokenList[1], self._Macros)
self._ValueList[0:len(TokenList)] = TokenList
# Treat elements in the [defines] section as global macros for the FDF file.
self._LastItem = self._Store(
MODEL_META_DATA_GLOBAL_DEFINE,
TokenList[0],
TokenList[1],
'',
'COMMON',
'COMMON',
self._Owner,
self._From,
self._LineIndex+1,
-1,
self._LineIndex+1,
-1,
self._Enabled
)
## <subsection_header> parser
def _SubsectionHeaderParser(self):
@ -762,7 +803,7 @@ class DscParser(MetaFileParser):
EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
# the included file must be relative to the parsing file
IncludedFile = os.path.join(self._FileDir, self._ValueList[1])
IncludedFile = os.path.join(self._FileDir, NormPath(self._ValueList[1], self._Macros))
Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
# set the parser status with current status
Parser._SectionName = self._SectionName
@ -781,6 +822,7 @@ class DscParser(MetaFileParser):
self._SectionType = Parser._SectionType
self._Scope = Parser._Scope
self._Enabled = Parser._Enabled
self._Macros.update(Parser._Macros)
else:
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
# evaluate the expression
@ -965,6 +1007,7 @@ class DecParser(MetaFileParser):
#
def __init__(self, FilePath, FileType, Table, Macro=None):
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
self._Comments = []
## Parser starter
def Start(self):
@ -975,27 +1018,34 @@ class DecParser(MetaFileParser):
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
for Index in range(0, len(self._Content)):
Line = CleanString(self._Content[Index])
Line, Comment = CleanString2(self._Content[Index])
self._CurrentLine = Line
self._LineIndex = Index
# save comment for later use
if Comment:
self._Comments.append((Comment, self._LineIndex+1))
# skip empty line
if Line == '':
continue
self._CurrentLine = Line
self._LineIndex = Index
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
self._Comments = []
continue
elif Line.startswith('DEFINE '):
self._MacroParser()
continue
elif len(self._SectionType) == 0:
self._Comments = []
continue
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
if self._ValueList == None:
self._Comments = []
continue
#
@ -1017,6 +1067,22 @@ class DecParser(MetaFileParser):
-1,
0
)
for Comment, LineNo in self._Comments:
self._Store(
MODEL_META_DATA_COMMENT,
Comment,
self._ValueList[0],
self._ValueList[1],
Arch,
ModuleType,
self._LastItem,
LineNo,
-1,
LineNo,
-1,
0
)
self._Comments = []
self._Done()
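The DecParser changes above buffer the comment portion of each line in self._Comments and either attach the buffered comments to the record they precede (stored as MODEL_META_DATA_COMMENT rows) or drop them at section headers and empty sections. A simplified, standalone sketch of that buffering scheme; the helper functions are illustrative, not the MetaFileParser API:

def SplitLineAndComment(Line, CommentChar='#'):
    # Rough stand-in for CleanString2(): return (code part, comment part).
    Pos = Line.find(CommentChar)
    if Pos < 0:
        return Line.strip(), ''
    return Line[:Pos].strip(), Line[Pos:].strip()

def CollectRecords(Lines):
    Records = []
    Comments = []
    for Index, Raw in enumerate(Lines):
        Line, Comment = SplitLineAndComment(Raw)
        if Comment:
            Comments.append((Comment, Index + 1))
        if Line == '':
            continue
        if Line.startswith('[') and Line.endswith(']'):
            Comments = []                  # comments before a section header are dropped
            continue
        Records.append((Line, Comments))   # buffered comments attach to this record
        Comments = []
    return Records

print(CollectRecords(["# GUID for Foo", "[Guids]", "# introduced in PI 1.1", "gFooGuid = {0x1}"]))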
## Section header parser

View File

@ -18,6 +18,7 @@ import sqlite3
import os
import os.path
import pickle
import uuid
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
@ -100,6 +101,10 @@ class DscBuildData(PlatformBuildClassObject):
for Record in RecordList:
GlobalData.gEdkGlobal[Record[0]] = Record[1]
RecordList = self._RawData[MODEL_META_DATA_GLOBAL_DEFINE, self._Arch]
for Record in RecordList:
GlobalData.gGlobalDefines[Record[0]] = Record[1]
## XXX[key] = value
def __setitem__(self, key, value):
self.__dict__[self._PROPERTY_[key]] = value
@ -135,6 +140,8 @@ class DscBuildData(PlatformBuildClassObject):
self._Pcds = None
self._BuildOptions = None
self._LoadFixAddress = None
self._VpdToolGuid = None
self._VpdFileName = None
## Get architecture
def _GetArch(self):
@ -188,6 +195,18 @@ class DscBuildData(PlatformBuildClassObject):
self._SkuName = Record[1]
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
self._LoadFixAddress = Record[1]
elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
#
# try to convert the GUID to a real UUID value to check whether the
# format of the GUID given for VPD_TOOL_GUID is correct.
#
try:
uuid.UUID(Record[1])
except:
EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
self._VpdToolGuid = Record[1]
elif Name == TAB_DSC_DEFINES_VPD_FILENAME:
self._VpdFileName = Record[1]
# set _Header to non-None in order to avoid database re-querying
self._Header = 'DUMMY'
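The new VPD_TOOL_GUID handling relies on uuid.UUID() to validate the registry-format GUID string: a well-formed GUID constructs successfully and anything else raises an exception, which is turned into a FORMAT_INVALID build error. A self-contained illustration (the GUID value below is just an example):

import uuid

for Candidate in ("8C3D856A-9BE6-468E-850A-24F7A8D38E08", "not-a-guid"):
    try:
        uuid.UUID(Candidate)
        print("%s -> accepted as VPD_TOOL_GUID" % Candidate)
    except ValueError:
        print("%s -> rejected: invalid GUID format" % Candidate)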
@ -267,6 +286,8 @@ class DscBuildData(PlatformBuildClassObject):
def _SetSkuName(self, Value):
if Value in self.SkuIds:
self._SkuName = Value
# Needs to re-retrieve the PCD information
self._Pcds = None
def _GetFdfFile(self):
if self._FlashDefinition == None:
@ -321,6 +342,24 @@ class DscBuildData(PlatformBuildClassObject):
self._LoadFixAddress = ''
return self._LoadFixAddress
## Retrieve the GUID string for VPD tool
def _GetVpdToolGuid(self):
if self._VpdToolGuid == None:
if self._Header == None:
self._GetHeaderInfo()
if self._VpdToolGuid == None:
self._VpdToolGuid = ''
return self._VpdToolGuid
## Retrieve the VPD file Name, this is optional in DSC file
def _GetVpdFileName(self):
if self._VpdFileName == None:
if self._Header == None:
self._GetHeaderInfo()
if self._VpdFileName == None:
self._VpdFileName = ''
return self._VpdFileName
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
if self._SkuIds == None:
@ -418,6 +457,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{},
False,
None
)
Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
@ -576,6 +616,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{},
False,
None
)
return Pcds
@ -619,6 +660,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{self.SkuName : SkuInfo},
False,
None
)
return Pcds
@ -661,6 +703,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
'',
{self.SkuName : SkuInfo},
False,
None
)
return Pcds
@ -686,15 +729,21 @@ class DscBuildData(PlatformBuildClassObject):
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
ValueList = ['', '']
ValueList = ['', '', '']
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
VpdOffset, MaxDatumSize = ValueList
#
# For the VOID* type, the setting can carry optional MaxDatumSize and InitialValue data.
# For the Integer and Boolean types, the only optional data is InitialValue.
# At this point we put all of the data into the PcdClassObject, because the PCD's datum type
# is not known until the DEC parser has been called.
#
VpdOffset, MaxDatumSize, InitialValue = ValueList
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
@ -704,6 +753,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{self.SkuName : SkuInfo},
False,
None
)
return Pcds
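Because the DSC parser does not yet know a dynamic VPD PCD's datum type, the setting string is padded into a fixed three-slot list so that VpdOffset, MaxDatumSize and InitialValue can always be unpacked, with absent optional fields left as empty strings. A minimal sketch of that pad-and-unpack step (the helper name is illustrative):

TAB_VALUE_SPLIT = '|'

def ParseVpdSetting(Setting):
    ValueList = ['', '', '']
    TokenList = Setting.split(TAB_VALUE_SPLIT)
    ValueList[0:len(TokenList)] = TokenList
    VpdOffset, MaxDatumSize, InitialValue = ValueList
    return VpdOffset.strip(), MaxDatumSize.strip(), InitialValue.strip()

print(ParseVpdSetting('0x100|16|"Default"'))   # VOID* PCD: offset, max size and initial value
print(ParseVpdSetting('0x200'))                # only the offset given; the other slots stay empty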
@ -733,7 +783,7 @@ class DscBuildData(PlatformBuildClassObject):
#
def AddPcd(self, Name, Guid, Value):
if (Name, Guid) not in self.Pcds:
self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None)
self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
self.Pcds[Name, Guid].DefaultValue = Value
Arch = property(_GetArch, _SetArch)
@ -752,7 +802,8 @@ class DscBuildData(PlatformBuildClassObject):
BsBaseAddress = property(_GetBsBaseAddress)
RtBaseAddress = property(_GetRtBaseAddress)
LoadFixAddress = property(_GetLoadFixAddress)
VpdToolGuid = property(_GetVpdToolGuid)
VpdFileName = property(_GetVpdFileName)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
LibraryInstances = property(_GetLibraryInstances)
@ -760,7 +811,7 @@ class DscBuildData(PlatformBuildClassObject):
Pcds = property(_GetPcds)
BuildOptions = property(_GetBuildOptions)
## Platform build information from DSC file
## Platform build information from DEC file
#
# This class is used to retrieve information stored in database and convert them
# into PackageBuildClassObject form for easier use for AutoGen.
@ -789,6 +840,7 @@ class DecBuildData(PackageBuildClassObject):
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
}
@ -830,6 +882,7 @@ class DecBuildData(PackageBuildClassObject):
self._PackageName = None
self._Guid = None
self._Version = None
self._PkgUniFile = None
self._Protocols = None
self._Ppis = None
self._Guids = None
@ -1063,6 +1116,7 @@ class DecBuildData(PackageBuildClassObject):
TokenNumber,
'',
{},
False,
None
)
return Pcds
@ -1914,6 +1968,7 @@ class InfBuildData(ModuleBuildClassObject):
'',
'',
{},
False,
self.Guids[TokenSpaceGuid]
)
@ -1927,7 +1982,7 @@ class InfBuildData(ModuleBuildClassObject):
# "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
#
PcdType = self._PCD_TYPE_STRING_[Type]
if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
if Type == MODEL_PCD_DYNAMIC:
Pcd.Pending = True
for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:
@ -1994,7 +2049,7 @@ class InfBuildData(ModuleBuildClassObject):
## Database
#
# This class defined the build databse for all modules, packages and platform.
# This class defined the build database for all modules, packages and platform.
# It will call corresponding parser for the given file if it cannot find it in
# the database.
#

View File

@ -23,6 +23,7 @@ import textwrap
import traceback
import sys
import time
import struct
from datetime import datetime
from StringIO import StringIO
from Common import EdkLogger
@ -101,6 +102,9 @@ gDriverTypeMap = {
'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
}
## The look-up table of the supported opcodes in dependency expression binaries
gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]
##
# Writes a string to the file object.
#
@ -162,6 +166,60 @@ def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
break
##
# Parse binary dependency expression section
#
# This utility class parses the dependency expression section and translates GUID
# values into their readable GUID names.
#
class DepexParser(object):
##
# Constructor function for class DepexParser
#
# This constructor function collects GUID values so that readable
# GUID names can be translated.
#
# @param self The object pointer
# @param Wa Workspace context information
#
def __init__(self, Wa):
self._GuidDb = {}
for Package in Wa.BuildDatabase.WorkspaceDb.PackageList:
for Protocol in Package.Protocols:
GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
self._GuidDb[GuidValue.upper()] = Protocol
for Ppi in Package.Ppis:
GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])
self._GuidDb[GuidValue.upper()] = Ppi
for Guid in Package.Guids:
GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
self._GuidDb[GuidValue.upper()] = Guid
##
# Parse the binary dependency expression files.
#
# This function parses the binary dependency expression file and translates it
# into an instruction list.
#
# @param self The object pointer
# @param DepexFileName The file name of binary dependency expression file.
#
def ParseDepexFile(self, DepexFileName):
DepexFile = open(DepexFileName, "rb")
DepexStatement = []
OpCode = DepexFile.read(1)
while OpCode:
Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
if Statement in ["BEFORE", "AFTER", "PUSH"]:
GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
struct.unpack("LHHBBBBBBBB", DepexFile.read(16))
GuidString = self._GuidDb.get(GuidValue, GuidValue)
Statement = "%s %s" % (Statement, GuidString)
DepexStatement.append(Statement)
OpCode = DepexFile.read(1)
return DepexStatement
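ParseDepexFile() above walks the depex binary one opcode byte at a time; BEFORE, AFTER and PUSH are each followed by a 16-byte GUID that is rendered in registry format and, when possible, replaced by its symbolic name from the GUID database. A hedged usage sketch that builds a two-instruction depex (PUSH <guid>, END) in a temporary file and decodes it with the same opcode table; it uses standard-size struct codes ("=LHHBBBBBBBB") so the 16-byte layout holds on any host, and leaves the GUID database empty so the raw GUID string is printed:

import os
import struct
import tempfile

gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]

def ParseDepex(FileName, GuidDb=None):
    GuidDb = GuidDb or {}
    DepexStatement = []
    DepexFile = open(FileName, "rb")
    OpCode = DepexFile.read(1)
    while OpCode:
        Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
        if Statement in ["BEFORE", "AFTER", "PUSH"]:
            GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
                        struct.unpack("=LHHBBBBBBBB", DepexFile.read(16))
            Statement = "%s %s" % (Statement, GuidDb.get(GuidValue, GuidValue))
        DepexStatement.append(Statement)
        OpCode = DepexFile.read(1)
    DepexFile.close()
    return DepexStatement

Fd, Name = tempfile.mkstemp(suffix=".depex")
DepexFile = os.fdopen(Fd, "wb")
DepexFile.write(struct.pack("B", 2))                                        # PUSH
DepexFile.write(struct.pack("=LHHBBBBBBBB", 0x12345678, 0x9ABC, 0xDEF0, *([0xAA] * 8)))
DepexFile.write(struct.pack("B", 8))                                        # END
DepexFile.close()
print(ParseDepex(Name))    # ['PUSH 12345678-9ABC-DEF0-AAAA-AAAAAAAAAAAA', 'END']
os.unlink(Name)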
##
# Reports library information
#
@ -254,6 +312,7 @@ class DepexReport(object):
#
def __init__(self, M):
self.Depex = ""
self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
ModuleType = M.ModuleType
if not ModuleType:
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
@ -291,12 +350,23 @@ class DepexReport(object):
#
# @param self The object pointer
# @param File The file object for report
# @param GlobalDepexParser The platform global Dependency expression parser object
#
def GenerateReport(self, File):
def GenerateReport(self, File, GlobalDepexParser):
if not self.Depex:
return
FileWrite(File, gSubSectionStart)
if os.path.isfile(self._DepexFileName):
try:
DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)
FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")
for DepexStatement in DepexStatements:
FileWrite(File, " %s" % DepexStatement)
FileWrite(File, gSubSectionSep)
except:
EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
if self.Source == "INF":
@ -455,10 +525,12 @@ class ModuleReport(object):
#
# @param self The object pointer
# @param File The file object for report
# @param GlobalPcdReport The platform global PCD class object
# @param GlobalPcdReport The platform global PCD report object
# @param GlobalPredictionReport The platform global Prediction report object
# @param GlobalDepexParser The platform global Dependency expression parser object
# @param ReportType The kind of report items in the final report file
#
def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, ReportType):
def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):
FileWrite(File, gSectionStart)
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")
@ -505,7 +577,7 @@ class ModuleReport(object):
self.LibraryReport.GenerateReport(File)
if "DEPEX" in ReportType:
self.DepexReport.GenerateReport(File)
self.DepexReport.GenerateReport(File, GlobalDepexParser)
if "BUILD_FLAGS" in ReportType:
self.BuildFlagsReport.GenerateReport(File)
@ -1325,6 +1397,10 @@ class PlatformReport(object):
if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
self.PredictionReport = PredictionReport(Wa)
self.DepexParser = None
if "DEPEX" in ReportType:
self.DepexParser = DepexParser(Wa)
self.ModuleReportList = []
if MaList != None:
self._IsModuleBuild = True
@ -1371,7 +1447,7 @@ class PlatformReport(object):
FdReportListItem.GenerateReport(File)
for ModuleReportItem in self.ModuleReportList:
ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, ReportType)
ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)
if not self._IsModuleBuild:
if "EXECUTION_ORDER" in ReportType:

View File

@ -23,6 +23,7 @@ import glob
import time
import platform
import traceback
import encodings.ascii
from struct import *
from threading import *
@ -735,7 +736,7 @@ class Build():
self.LoadFixAddress = 0
self.UniFlag = UniFlag
# print dot charater during doing some time-consuming work
# print dot character during doing some time-consuming work
self.Progress = Utils.Progressor()
# parse target.txt, tools_def.txt, and platform file