mirror of https://github.com/acidanthera/audk.git
Sync EDKII BaseTools to BaseTools project r2042.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10850 6f19259b-4bc3-4df7-8a09-765794883524
This commit is contained in:
parent
034ffda8b2
commit
e56468c072
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
RunToolFromSource
|
|
@ -295,7 +295,7 @@
|
||||||
$(DEBUG_DIR)(+)$(MODULE_NAME).efi
|
$(DEBUG_DIR)(+)$(MODULE_NAME).efi
|
||||||
|
|
||||||
<Command.MSFT, Command.INTEL, Command.RVCT, Command.ARMGCC>
|
<Command.MSFT, Command.INTEL, Command.RVCT, Command.ARMGCC>
|
||||||
GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
|
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
|
||||||
$(CP) ${dst} $(OUTPUT_DIR)
|
$(CP) ${dst} $(OUTPUT_DIR)
|
||||||
$(CP) ${dst} $(BIN_DIR)
|
$(CP) ${dst} $(BIN_DIR)
|
||||||
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
||||||
|
@ -304,7 +304,7 @@
|
||||||
$(OBJCOPY) --only-keep-debug ${src} $(BIN_DIR)(+)$(MODULE_NAME).debug
|
$(OBJCOPY) --only-keep-debug ${src} $(BIN_DIR)(+)$(MODULE_NAME).debug
|
||||||
$(OBJCOPY) --strip-unneeded ${src}
|
$(OBJCOPY) --strip-unneeded ${src}
|
||||||
$(OBJCOPY) --add-gnu-debuglink=$(BIN_DIR)(+)$(MODULE_NAME).debug ${src}
|
$(OBJCOPY) --add-gnu-debuglink=$(BIN_DIR)(+)$(MODULE_NAME).debug ${src}
|
||||||
GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
|
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
|
||||||
$(CP) ${dst} $(OUTPUT_DIR)
|
$(CP) ${dst} $(OUTPUT_DIR)
|
||||||
$(CP) ${dst} $(BIN_DIR)
|
$(CP) ${dst} $(BIN_DIR)
|
||||||
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
||||||
|
@ -314,7 +314,7 @@
|
||||||
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) ${src} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
|
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) ${src} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
|
||||||
# create symbol file for GDB debug
|
# create symbol file for GDB debug
|
||||||
-$(DSYMUTIL) ${src}
|
-$(DSYMUTIL) ${src}
|
||||||
GenFw -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
|
"$(GENFW)" -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff $(GENFW_FLAGS)
|
||||||
$(CP) ${dst} $(OUTPUT_DIR)
|
$(CP) ${dst} $(OUTPUT_DIR)
|
||||||
$(CP) ${dst} $(BIN_DIR)
|
$(CP) ${dst} $(BIN_DIR)
|
||||||
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
|
||||||
|
@ -367,12 +367,12 @@
|
||||||
<Command.MSFT, Command.INTEL>
|
<Command.MSFT, Command.INTEL>
|
||||||
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
||||||
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
||||||
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
|
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
|
||||||
|
|
||||||
<Command.GCC>
|
<Command.GCC>
|
||||||
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
||||||
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
||||||
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
|
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
|
||||||
|
|
||||||
[Acpi-Table-Code-File]
|
[Acpi-Table-Code-File]
|
||||||
<InputFile>
|
<InputFile>
|
||||||
|
@ -387,18 +387,18 @@
|
||||||
<Command.MSFT, Command.INTEL>
|
<Command.MSFT, Command.INTEL>
|
||||||
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
||||||
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
||||||
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
|
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
|
||||||
|
|
||||||
<Command.GCC>
|
<Command.GCC>
|
||||||
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
|
||||||
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
||||||
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
|
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
|
||||||
|
|
||||||
<Command.XCODE>
|
<Command.XCODE>
|
||||||
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
|
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
|
||||||
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
|
||||||
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
|
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
|
||||||
GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
|
"$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi $(GENFW_FLAGS)
|
||||||
|
|
||||||
|
|
||||||
[Masm16-Code-File]
|
[Masm16-Code-File]
|
||||||
|
@ -438,7 +438,7 @@
|
||||||
$(OUTPUT_DIR)(+)${s_base}.mcb
|
$(OUTPUT_DIR)(+)${s_base}.mcb
|
||||||
|
|
||||||
<Command>
|
<Command>
|
||||||
GenFw -o ${dst} -m ${src}
|
"$(GENFW)" -o ${dst} -m ${src} $(GENFW_FLAGS)
|
||||||
|
|
||||||
[Microcode-Binary-File]
|
[Microcode-Binary-File]
|
||||||
<InputFile>
|
<InputFile>
|
||||||
|
@ -451,7 +451,7 @@
|
||||||
$(OUTPUT_DIR)(+)$(MODULE_NAME).bin
|
$(OUTPUT_DIR)(+)$(MODULE_NAME).bin
|
||||||
|
|
||||||
<Command>
|
<Command>
|
||||||
GenFw -o ${dst} -j $(MICROCODE_BINARY_FILES)
|
"$(GENFW)" -o ${dst} -j $(MICROCODE_BINARY_FILES) $(GENFW_FLAGS)
|
||||||
-$(CP) ${dst} $(BIN_DIR)
|
-$(CP) ${dst} $(BIN_DIR)
|
||||||
|
|
||||||
[EFI-Image-File]
|
[EFI-Image-File]
|
||||||
|
@ -518,9 +518,9 @@
|
||||||
$(OUTPUT_DIR)(+)$(MODULE_NAME)hii.lib
|
$(OUTPUT_DIR)(+)$(MODULE_NAME)hii.lib
|
||||||
|
|
||||||
<Command.MSFT, Command.INTEL>
|
<Command.MSFT, Command.INTEL>
|
||||||
GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES)
|
"$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
|
||||||
"$(RC)" /Fo${dst} $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc
|
"$(RC)" /Fo${dst} $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc
|
||||||
|
|
||||||
<Command.GCC>
|
<Command.GCC>
|
||||||
GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES)
|
"$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
|
||||||
"$(RC)" $(RC_FLAGS) $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc ${dst}
|
"$(RC)" $(RC_FLAGS) $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc ${dst}
|
||||||
|
|
|
@ -63,10 +63,6 @@ TOOL_CHAIN_TAG = MYTOOLS
|
||||||
# cores or CPUs. Less than 2 means disable multithread build.
|
# cores or CPUs. Less than 2 means disable multithread build.
|
||||||
MAX_CONCURRENT_THREAD_NUMBER = 1
|
MAX_CONCURRENT_THREAD_NUMBER = 1
|
||||||
|
|
||||||
# MULTIPLE_THREAD BOOLEAN Optional If "Enable", multi-thread is enable for bulding.
|
|
||||||
# If "Disable", multi-thread is disable for building.
|
|
||||||
MULTIPLE_THREAD = Disable
|
|
||||||
|
|
||||||
# Build rules definition
|
# Build rules definition
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
|
@ -86,8 +86,8 @@ DEFINE ICC11_ASM32x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERS
|
||||||
|
|
||||||
DEFINE ICC11_BINX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
DEFINE ICC11_BINX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
||||||
DEFINE ICC11_ASMX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
DEFINE ICC11_ASMX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
||||||
DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
|
||||||
DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
|
DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
|
||||||
|
|
||||||
DEFINE ICC11_BIN64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
|
DEFINE ICC11_BIN64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
|
||||||
DEFINE ICC11_BIN64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
|
DEFINE ICC11_BIN64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
|
||||||
|
@ -3308,8 +3308,8 @@ RELEASE_XCODE32_ARM_ASM_FLAGS = $(ARCHASM_FLAGS)
|
||||||
*_XCODE32_ARM_PP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -E -x assembler-with-cpp -include $(DEST_DIR_DEBUG)/AutoGen.h
|
*_XCODE32_ARM_PP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -E -x assembler-with-cpp -include $(DEST_DIR_DEBUG)/AutoGen.h
|
||||||
*_XCODE32_ARM_VFRPP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -x c -E -P -DVFRCOMPILE --include $(DEST_DIR_DEBUG)/$(MODULE_NAME)StrDefs.h
|
*_XCODE32_ARM_VFRPP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -x c -E -P -DVFRCOMPILE --include $(DEST_DIR_DEBUG)/$(MODULE_NAME)StrDefs.h
|
||||||
|
|
||||||
DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
|
DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
|
||||||
RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
|
RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
|
||||||
|
|
||||||
|
|
||||||
####################################################################################
|
####################################################################################
|
||||||
|
@ -3481,6 +3481,12 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
|
||||||
*_*_*_OPTROM_PATH = EfiRom
|
*_*_*_OPTROM_PATH = EfiRom
|
||||||
*_*_*_OPTROM_FLAGS = -e
|
*_*_*_OPTROM_FLAGS = -e
|
||||||
|
|
||||||
|
##################
|
||||||
|
# GenFw tool definitions
|
||||||
|
##################
|
||||||
|
*_*_*_GENFW_PATH = GenFw
|
||||||
|
*_*_*_GENFW_FLAGS =
|
||||||
|
|
||||||
##################
|
##################
|
||||||
# Asl Compiler definitions
|
# Asl Compiler definitions
|
||||||
##################
|
##################
|
||||||
|
@ -3501,3 +3507,8 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
|
||||||
*_*_*_TIANO_PATH = TianoCompress
|
*_*_*_TIANO_PATH = TianoCompress
|
||||||
*_*_*_TIANO_GUID = A31280AD-481E-41B6-95E8-127F4C984779
|
*_*_*_TIANO_GUID = A31280AD-481E-41B6-95E8-127F4C984779
|
||||||
|
|
||||||
|
##################
|
||||||
|
# BPDG tool definitions
|
||||||
|
##################
|
||||||
|
*_*_*_VPDTOOL_PATH = BPDG
|
||||||
|
*_*_*_VPDTOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
## @file
|
## @file
|
||||||
#
|
#
|
||||||
# The makefile can be invoked with
|
# The makefile can be invoked with
|
||||||
# ARCH = x86_64 or x64 for EM64T build
|
# ARCH = x86_64 or x64 for EM64T build
|
||||||
# ARCH = ia32 or IA32 for IA32 build
|
# ARCH = ia32 or IA32 for IA32 build
|
||||||
# ARCH = ia64 or IA64 for IA64 build
|
# ARCH = ia64 or IA64 for IA64 build
|
||||||
#
|
#
|
||||||
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
||||||
# This program and the accompanying materials
|
# This program and the accompanying materials
|
||||||
# are licensed and made available under the terms and conditions of the BSD License
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
|
@ -12,55 +12,52 @@
|
||||||
# http://opensource.org/licenses/bsd-license.php
|
# http://opensource.org/licenses/bsd-license.php
|
||||||
#
|
#
|
||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
|
||||||
ARCH ?= IA32
|
ARCH ?= IA32
|
||||||
|
|
||||||
CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
|
CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
|
||||||
LINUX:=$(findstring Linux, $(shell uname -s))
|
LINUX:=$(findstring Linux, $(shell uname -s))
|
||||||
DARWIN:=$(findstring Darwin, $(shell uname -s))
|
DARWIN:=$(findstring Darwin, $(shell uname -s))
|
||||||
|
|
||||||
CC = gcc
|
CC = gcc
|
||||||
CXX = g++
|
CXX = g++
|
||||||
AS = gcc
|
AS = gcc
|
||||||
AR = ar
|
AR = ar
|
||||||
LD = ld
|
LD = ld
|
||||||
LINKER ?= $(CC)
|
LINKER ?= $(CC)
|
||||||
ifeq ($(ARCH), IA32)
|
ifeq ($(ARCH), IA32)
|
||||||
ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
|
ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(ARCH), X64)
|
ifeq ($(ARCH), X64)
|
||||||
ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
|
ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
|
||||||
endif
|
endif
|
||||||
|
|
||||||
INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
|
INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
|
||||||
CPPFLAGS = $(INCLUDE)
|
CPPFLAGS = $(INCLUDE)
|
||||||
CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
|
CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
|
||||||
LFLAGS =
|
LFLAGS =
|
||||||
|
|
||||||
#
|
#
|
||||||
# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
|
# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
|
||||||
# to x86_64. So make sure tools match uname -m
|
# to x86_64. So make sure tools match uname -m
|
||||||
#
|
#
|
||||||
uname_s = $(shell uname -s)
|
uname_s = $(shell uname -s)
|
||||||
uname_m = $(shell uname -m)
|
ifeq ($(uname_s),Darwin)
|
||||||
ifeq ($(uname_s),Darwin)
|
CFLAGS += -arch i386
|
||||||
ifeq ($(uname_m),i386)
|
CPPFLAGS += -arch i386
|
||||||
CFLAGS += -arch i386
|
LFLAGS += -arch i386
|
||||||
CPPFLAGS += -arch i386
|
endif
|
||||||
LFLAGS += -arch i386
|
|
||||||
endif
|
.PHONY: all
|
||||||
endif
|
.PHONY: install
|
||||||
|
.PHONY: clean
|
||||||
.PHONY: all
|
|
||||||
.PHONY: install
|
all:
|
||||||
.PHONY: clean
|
|
||||||
|
$(MAKEROOT)/libs:
|
||||||
all:
|
mkdir $(MAKEROOT)/libs
|
||||||
|
|
||||||
$(MAKEROOT)/libs:
|
$(MAKEROOT)/bin:
|
||||||
mkdir $(MAKEROOT)/libs
|
mkdir $(MAKEROOT)/bin
|
||||||
|
|
||||||
$(MAKEROOT)/bin:
|
|
||||||
mkdir $(MAKEROOT)/bin
|
|
||||||
|
|
|
@ -34,6 +34,7 @@ import Common.GlobalData as GlobalData
|
||||||
from GenFds.FdfParser import *
|
from GenFds.FdfParser import *
|
||||||
from CommonDataClass.CommonClass import SkuInfoClass
|
from CommonDataClass.CommonClass import SkuInfoClass
|
||||||
from Workspace.BuildClassObject import *
|
from Workspace.BuildClassObject import *
|
||||||
|
import Common.VpdInfoFile as VpdInfoFile
|
||||||
|
|
||||||
## Regular expression for splitting Dependency Expression stirng into tokens
|
## Regular expression for splitting Dependency Expression stirng into tokens
|
||||||
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
|
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
|
||||||
|
@ -244,7 +245,7 @@ class WorkspaceAutoGen(AutoGen):
|
||||||
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
|
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
|
||||||
return self._BuildCommand
|
return self._BuildCommand
|
||||||
|
|
||||||
## Create makefile for the platform and mdoules in it
|
## Create makefile for the platform and modules in it
|
||||||
#
|
#
|
||||||
# @param CreateDepsMakeFile Flag indicating if the makefile for
|
# @param CreateDepsMakeFile Flag indicating if the makefile for
|
||||||
# modules will be created as well
|
# modules will be created as well
|
||||||
|
@ -476,19 +477,141 @@ class PlatformAutoGen(AutoGen):
|
||||||
UnicodePcdArray = []
|
UnicodePcdArray = []
|
||||||
HiiPcdArray = []
|
HiiPcdArray = []
|
||||||
OtherPcdArray = []
|
OtherPcdArray = []
|
||||||
for Pcd in self._DynamicPcdList:
|
VpdFile = VpdInfoFile.VpdInfoFile()
|
||||||
# just pick the a value to determine whether is unicode string type
|
NeedProcessVpdMapFile = False
|
||||||
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
|
||||||
PcdValue = Sku.DefaultValue
|
if (self.Workspace.ArchList[-1] == self.Arch):
|
||||||
if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
|
for Pcd in self._DynamicPcdList:
|
||||||
# if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
|
|
||||||
UnicodePcdArray.append(Pcd)
|
# just pick the a value to determine whether is unicode string type
|
||||||
elif len(Sku.VariableName) > 0:
|
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
||||||
# if found HII type PCD then insert to right of UnicodeIndex
|
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||||
HiiPcdArray.append(Pcd)
|
|
||||||
else:
|
PcdValue = Sku.DefaultValue
|
||||||
OtherPcdArray.append(Pcd)
|
if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
|
||||||
del self._DynamicPcdList[:]
|
# if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
|
||||||
|
UnicodePcdArray.append(Pcd)
|
||||||
|
elif len(Sku.VariableName) > 0:
|
||||||
|
# if found HII type PCD then insert to right of UnicodeIndex
|
||||||
|
HiiPcdArray.append(Pcd)
|
||||||
|
else:
|
||||||
|
OtherPcdArray.append(Pcd)
|
||||||
|
|
||||||
|
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||||
|
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
|
||||||
|
#
|
||||||
|
# Fix the optional data of VPD PCD.
|
||||||
|
#
|
||||||
|
if (Pcd.DatumType.strip() != "VOID*"):
|
||||||
|
if Sku.DefaultValue == '':
|
||||||
|
Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
|
||||||
|
Pcd.MaxDatumSize = None
|
||||||
|
else:
|
||||||
|
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
|
||||||
|
File=self.MetaFile,
|
||||||
|
ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
|
||||||
|
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
|
||||||
|
|
||||||
|
VpdFile.Add(Pcd, Sku.VpdOffset)
|
||||||
|
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||||
|
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
|
||||||
|
NeedProcessVpdMapFile = True
|
||||||
|
|
||||||
|
#
|
||||||
|
# Fix the PCDs define in VPD PCD section that never referenced by module.
|
||||||
|
# An example is PCD for signature usage.
|
||||||
|
#
|
||||||
|
for DscPcd in self.Platform.Pcds:
|
||||||
|
DscPcdEntry = self.Platform.Pcds[DscPcd]
|
||||||
|
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||||
|
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
|
||||||
|
FoundFlag = False
|
||||||
|
for VpdPcd in VpdFile._VpdArray.keys():
|
||||||
|
# This PCD has been referenced by module
|
||||||
|
if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
|
||||||
|
(VpdPcd.TokenCName == DscPcdEntry.TokenCName):
|
||||||
|
FoundFlag = True
|
||||||
|
|
||||||
|
# Not found, it should be signature
|
||||||
|
if not FoundFlag :
|
||||||
|
# just pick the a value to determine whether is unicode string type
|
||||||
|
Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
|
||||||
|
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||||
|
|
||||||
|
# Need to iterate DEC pcd information to get the value & datumtype
|
||||||
|
for eachDec in self.PackageList:
|
||||||
|
for DecPcd in eachDec.Pcds:
|
||||||
|
DecPcdEntry = eachDec.Pcds[DecPcd]
|
||||||
|
if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
|
||||||
|
(DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
|
||||||
|
DscPcdEntry.DatumType = DecPcdEntry.DatumType
|
||||||
|
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
|
||||||
|
Sku.DefaultValue = DecPcdEntry.DefaultValue
|
||||||
|
|
||||||
|
VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
|
||||||
|
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||||
|
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
|
||||||
|
NeedProcessVpdMapFile = True
|
||||||
|
|
||||||
|
|
||||||
|
if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
|
||||||
|
VpdFile.GetCount() != 0:
|
||||||
|
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
|
||||||
|
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
|
||||||
|
|
||||||
|
if VpdFile.GetCount() != 0:
|
||||||
|
WorkspaceDb = self.BuildDatabase.WorkspaceDb
|
||||||
|
DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
|
||||||
|
FvPath = os.path.join(self.BuildDir, "FV")
|
||||||
|
if not os.path.exists(FvPath):
|
||||||
|
try:
|
||||||
|
os.makedirs(FvPath)
|
||||||
|
except:
|
||||||
|
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
|
||||||
|
|
||||||
|
VpdFileName = self.Platform.VpdFileName
|
||||||
|
if VpdFileName == None or VpdFileName == "" :
|
||||||
|
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
|
||||||
|
else :
|
||||||
|
VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
|
||||||
|
|
||||||
|
if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
|
||||||
|
VpdFile.Write(VpdFilePath)
|
||||||
|
|
||||||
|
# retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
|
||||||
|
BPDGToolName = None
|
||||||
|
for ToolDef in self.ToolDefinition.values():
|
||||||
|
if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
|
||||||
|
if not ToolDef.has_key("PATH"):
|
||||||
|
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
|
||||||
|
BPDGToolName = ToolDef["PATH"]
|
||||||
|
break
|
||||||
|
# Call third party GUID BPDG tool.
|
||||||
|
if BPDGToolName != None:
|
||||||
|
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
|
||||||
|
else:
|
||||||
|
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
|
||||||
|
|
||||||
|
# Process VPD map file generated by third party BPDG tool
|
||||||
|
if NeedProcessVpdMapFile:
|
||||||
|
if VpdFileName == None or VpdFileName == "" :
|
||||||
|
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
|
||||||
|
else :
|
||||||
|
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
|
||||||
|
if os.path.exists(VpdMapFilePath):
|
||||||
|
VpdFile.Read(VpdMapFilePath)
|
||||||
|
|
||||||
|
# Fixup "*" offset
|
||||||
|
for Pcd in self._DynamicPcdList:
|
||||||
|
# just pick the a value to determine whether is unicode string type
|
||||||
|
Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
|
||||||
|
if Sku.VpdOffset == "*":
|
||||||
|
Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
|
||||||
|
else:
|
||||||
|
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
|
||||||
|
|
||||||
|
# Delete the DynamicPcdList At the last time enter into this function
|
||||||
|
del self._DynamicPcdList[:]
|
||||||
self._DynamicPcdList.extend(UnicodePcdArray)
|
self._DynamicPcdList.extend(UnicodePcdArray)
|
||||||
self._DynamicPcdList.extend(HiiPcdArray)
|
self._DynamicPcdList.extend(HiiPcdArray)
|
||||||
self._DynamicPcdList.extend(OtherPcdArray)
|
self._DynamicPcdList.extend(OtherPcdArray)
|
||||||
|
@ -709,10 +832,14 @@ class PlatformAutoGen(AutoGen):
|
||||||
|
|
||||||
## Get list of non-dynamic PCDs
|
## Get list of non-dynamic PCDs
|
||||||
def _GetNonDynamicPcdList(self):
|
def _GetNonDynamicPcdList(self):
|
||||||
|
if self._NonDynamicPcdList == None:
|
||||||
|
self.CollectPlatformDynamicPcds()
|
||||||
return self._NonDynamicPcdList
|
return self._NonDynamicPcdList
|
||||||
|
|
||||||
## Get list of dynamic PCDs
|
## Get list of dynamic PCDs
|
||||||
def _GetDynamicPcdList(self):
|
def _GetDynamicPcdList(self):
|
||||||
|
if self._DynamicPcdList == None:
|
||||||
|
self.CollectPlatformDynamicPcds()
|
||||||
return self._DynamicPcdList
|
return self._DynamicPcdList
|
||||||
|
|
||||||
## Generate Token Number for all PCD
|
## Generate Token Number for all PCD
|
||||||
|
@ -952,6 +1079,10 @@ class PlatformAutoGen(AutoGen):
|
||||||
if FromPcd != None:
|
if FromPcd != None:
|
||||||
if ToPcd.Pending and FromPcd.Type not in [None, '']:
|
if ToPcd.Pending and FromPcd.Type not in [None, '']:
|
||||||
ToPcd.Type = FromPcd.Type
|
ToPcd.Type = FromPcd.Type
|
||||||
|
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
|
||||||
|
and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
|
||||||
|
if ToPcd.Type.strip() == "DynamicEx":
|
||||||
|
ToPcd.Type = FromPcd.Type
|
||||||
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
|
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
|
||||||
and ToPcd.Type != FromPcd.Type:
|
and ToPcd.Type != FromPcd.Type:
|
||||||
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
|
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
|
||||||
|
|
|
@ -1028,7 +1028,9 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
||||||
ArraySize = ArraySize / 2;
|
ArraySize = ArraySize / 2;
|
||||||
|
|
||||||
if ArraySize < (len(Value) + 1):
|
if ArraySize < (len(Value) + 1):
|
||||||
ArraySize = len(Value) + 1
|
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||||
|
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
|
||||||
|
ExtraData="[%s]" % str(Info))
|
||||||
Value = NewValue + '0 }'
|
Value = NewValue + '0 }'
|
||||||
Array = '[%d]' % ArraySize
|
Array = '[%d]' % ArraySize
|
||||||
#
|
#
|
||||||
|
@ -1262,10 +1264,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
|
||||||
VariableHeadValueList = []
|
VariableHeadValueList = []
|
||||||
Pcd.InitString = 'UNINIT'
|
Pcd.InitString = 'UNINIT'
|
||||||
|
|
||||||
if Pcd.Type in ["DynamicVpd", "DynamicExVpd"]:
|
if Pcd.DatumType == 'VOID*':
|
||||||
Pcd.TokenTypeList = ['PCD_TYPE_VPD']
|
if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
|
||||||
elif Pcd.DatumType == 'VOID*':
|
Pcd.TokenTypeList = ['PCD_TYPE_STRING']
|
||||||
Pcd.TokenTypeList = ['PCD_TYPE_STRING']
|
else:
|
||||||
|
Pcd.TokenTypeList = []
|
||||||
elif Pcd.DatumType == 'BOOLEAN':
|
elif Pcd.DatumType == 'BOOLEAN':
|
||||||
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
|
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
|
||||||
else:
|
else:
|
||||||
|
@ -1364,8 +1367,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
|
||||||
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
|
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
|
||||||
if Pcd.MaxDatumSize != '':
|
if Pcd.MaxDatumSize != '':
|
||||||
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
|
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
|
||||||
if MaxDatumSize > Size:
|
if MaxDatumSize < Size:
|
||||||
Size = MaxDatumSize
|
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||||
|
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
|
||||||
|
ExtraData="[%s]" % str(Platform))
|
||||||
|
Size = MaxDatumSize
|
||||||
Dict['STRING_TABLE_LENGTH'].append(Size)
|
Dict['STRING_TABLE_LENGTH'].append(Size)
|
||||||
StringTableIndex += 1
|
StringTableIndex += 1
|
||||||
StringTableSize += (Size)
|
StringTableSize += (Size)
|
||||||
|
|
|
@ -334,6 +334,7 @@ TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
|
||||||
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
|
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
|
||||||
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
|
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
|
||||||
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
|
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
|
||||||
|
TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
|
||||||
|
|
||||||
#
|
#
|
||||||
# Dsc Definitions
|
# Dsc Definitions
|
||||||
|
@ -353,6 +354,8 @@ TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
|
||||||
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
|
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
|
||||||
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
|
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
|
||||||
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
|
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
|
||||||
|
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
|
||||||
|
TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'
|
||||||
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
|
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
|
||||||
|
|
||||||
#
|
#
|
||||||
|
|
|
@ -719,7 +719,7 @@ class TemplateString(object):
|
||||||
while Template:
|
while Template:
|
||||||
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
|
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
|
||||||
if not MatchObj:
|
if not MatchObj:
|
||||||
if MatchEnd < len(Template):
|
if MatchEnd <= len(Template):
|
||||||
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
|
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
|
||||||
TemplateSectionList.append(TemplateSection)
|
TemplateSectionList.append(TemplateSection)
|
||||||
break
|
break
|
||||||
|
|
|
@ -296,6 +296,50 @@ def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppSty
|
||||||
|
|
||||||
return Line
|
return Line
|
||||||
|
|
||||||
|
## CleanString2
|
||||||
|
#
|
||||||
|
# Split comments in a string
|
||||||
|
# Remove spaces
|
||||||
|
#
|
||||||
|
# @param Line: The string to be cleaned
|
||||||
|
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
|
||||||
|
#
|
||||||
|
# @retval Path Formatted path
|
||||||
|
#
|
||||||
|
def CleanString2(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
|
||||||
|
#
|
||||||
|
# remove whitespace
|
||||||
|
#
|
||||||
|
Line = Line.strip();
|
||||||
|
#
|
||||||
|
# Replace R8's comment character
|
||||||
|
#
|
||||||
|
if AllowCppStyleComment:
|
||||||
|
Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
|
||||||
|
#
|
||||||
|
# separate comments and statements
|
||||||
|
#
|
||||||
|
LineParts = Line.split(CommentCharacter, 1);
|
||||||
|
#
|
||||||
|
# remove whitespace again
|
||||||
|
#
|
||||||
|
Line = LineParts[0].strip();
|
||||||
|
if len(LineParts) > 1:
|
||||||
|
Comment = LineParts[1].strip()
|
||||||
|
# Remove prefixed and trailing comment characters
|
||||||
|
Start = 0
|
||||||
|
End = len(Comment)
|
||||||
|
while Start < End and Comment.startswith(CommentCharacter, Start, End):
|
||||||
|
Start += 1
|
||||||
|
while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
|
||||||
|
End -= 1
|
||||||
|
Comment = Comment[Start:End]
|
||||||
|
Comment = Comment.strip()
|
||||||
|
else:
|
||||||
|
Comment = ''
|
||||||
|
|
||||||
|
return Line, Comment
|
||||||
|
|
||||||
## GetMultipleValuesOfKeyFromLines
|
## GetMultipleValuesOfKeyFromLines
|
||||||
#
|
#
|
||||||
# Parse multiple strings to clean comment and spaces
|
# Parse multiple strings to clean comment and spaces
|
||||||
|
|
|
@ -0,0 +1,259 @@
|
||||||
|
## @file
|
||||||
|
#
|
||||||
|
# This package manage the VPD PCD information file which will be generated
|
||||||
|
# by build tool's autogen.
|
||||||
|
# The VPD PCD information file will be input for third-party BPDG tool which
|
||||||
|
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
|
||||||
|
# This program and the accompanying materials
|
||||||
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
|
# http://opensource.org/licenses/bsd-license.php
|
||||||
|
#
|
||||||
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
#
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import Common.EdkLogger as EdkLogger
|
||||||
|
import Common.BuildToolError as BuildToolError
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
FILE_COMMENT_TEMPLATE = \
|
||||||
|
"""
|
||||||
|
## @file
|
||||||
|
#
|
||||||
|
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
|
||||||
|
#
|
||||||
|
# This file lists all VPD informations for a platform collected by build.exe.
|
||||||
|
#
|
||||||
|
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
|
||||||
|
# This program and the accompanying materials
|
||||||
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
|
# http://opensource.org/licenses/bsd-license.php
|
||||||
|
#
|
||||||
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
## The class manage VpdInfoFile.
|
||||||
|
#
|
||||||
|
# This file contains an ordered (based on position in the DSC file) list of the PCDs specified in the platform description file (DSC). The Value field that will be assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC file) or the DEC file (if not defined in the INF file). This file is used as an input to the BPDG tool.
|
||||||
|
# Format for this file (using EBNF notation) is:
|
||||||
|
# <File> :: = [<CommentBlock>]
|
||||||
|
# [<PcdEntry>]*
|
||||||
|
# <CommentBlock> ::= ["#" <String> <EOL>]*
|
||||||
|
# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
|
||||||
|
# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>
|
||||||
|
# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID
|
||||||
|
# <PcdCName> ::= C Variable Name of the PCD
|
||||||
|
# <Offset> ::= {"*"} {<HexNumber>}
|
||||||
|
# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}
|
||||||
|
# <Size> ::= <HexNumber>
|
||||||
|
# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
|
||||||
|
# <NonNegativeInt> ::= (0-9)+
|
||||||
|
# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>
|
||||||
|
# <DblQuote> ::= 0x22
|
||||||
|
# <Array> ::= {<CArray>} {<NList>}
|
||||||
|
# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"
|
||||||
|
# <NList> ::= <HexNumber> ["," <HexNumber>]*
|
||||||
|
#
|
||||||
|
class VpdInfoFile:
|
||||||
|
|
||||||
|
## The mapping dictionary from datum type to size string.
|
||||||
|
_MAX_SIZE_TYPE = {"BOOLEAN":"1", "UINT8":"1", "UINT16":"2", "UINT32":"4", "UINT64":"8"}
|
||||||
|
_rVpdPcdLine = None
|
||||||
|
## Constructor
|
||||||
|
def __init__(self):
|
||||||
|
## Dictionary for VPD in following format
|
||||||
|
#
|
||||||
|
# Key : PcdClassObject instance.
|
||||||
|
# @see BuildClassObject.PcdClassObject
|
||||||
|
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
|
||||||
|
self._VpdArray = {}
|
||||||
|
|
||||||
|
## Add a VPD PCD collected from platform's autogen when building.
|
||||||
|
#
|
||||||
|
# @param vpds The list of VPD PCD collected for a platform.
|
||||||
|
# @see BuildClassObject.PcdClassObject
|
||||||
|
#
|
||||||
|
# @param offset integer value for VPD's offset in specific SKU.
|
||||||
|
#
|
||||||
|
def Add(self, Vpd, Offset):
|
||||||
|
if (Vpd == None):
|
||||||
|
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
|
||||||
|
|
||||||
|
if not (Offset >= 0 or Offset == "*"):
|
||||||
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
|
||||||
|
|
||||||
|
if Vpd.DatumType == "VOID*":
|
||||||
|
if Vpd.MaxDatumSize <= 0:
|
||||||
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||||
|
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
||||||
|
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
|
||||||
|
if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
|
||||||
|
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
|
||||||
|
else:
|
||||||
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||||
|
"Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
||||||
|
|
||||||
|
if Vpd not in self._VpdArray.keys():
|
||||||
|
#
|
||||||
|
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
|
||||||
|
#
|
||||||
|
self._VpdArray[Vpd] = [Offset]
|
||||||
|
else:
|
||||||
|
#
|
||||||
|
# If there is an offset for a specific SKU in dict, then append this offset for other sku to array.
|
||||||
|
#
|
||||||
|
self._VpdArray[Vpd].append(Offset)
|
||||||
|
|
||||||
|
|
||||||
|
## Generate VPD PCD information into a text file
|
||||||
|
#
|
||||||
|
# If parameter FilePath is invalid, then assert.
|
||||||
|
# If
|
||||||
|
# @param FilePath The given file path which would hold VPD information
|
||||||
|
def Write(self, FilePath):
|
||||||
|
if not (FilePath != None or len(FilePath) != 0):
|
||||||
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||||
|
"Invalid parameter FilePath: %s." % FilePath)
|
||||||
|
try:
|
||||||
|
fd = open(FilePath, "w")
|
||||||
|
except:
|
||||||
|
EdkLogger.error("VpdInfoFile",
|
||||||
|
BuildToolError.FILE_OPEN_FAILURE,
|
||||||
|
"Fail to open file %s for written." % FilePath)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# write file header
|
||||||
|
fd.write(FILE_COMMENT_TEMPLATE)
|
||||||
|
|
||||||
|
# write each of PCD in VPD type
|
||||||
|
for Pcd in self._VpdArray.keys():
|
||||||
|
for Offset in self._VpdArray[Pcd]:
|
||||||
|
PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
|
||||||
|
if PcdValue == "" :
|
||||||
|
PcdValue = Pcd.DefaultValue
|
||||||
|
|
||||||
|
fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
|
||||||
|
except:
|
||||||
|
EdkLogger.error("VpdInfoFile",
|
||||||
|
BuildToolError.FILE_WRITE_FAILURE,
|
||||||
|
"Fail to write file %s" % FilePath)
|
||||||
|
fd.close()
|
||||||
|
|
||||||
|
## Read an existing VPD PCD info file.
|
||||||
|
#
|
||||||
|
# This routine will read VPD PCD information from existing file and construct
|
||||||
|
# internal PcdClassObject array.
|
||||||
|
# This routine could be used by third-party tool to parse VPD info file content.
|
||||||
|
#
|
||||||
|
# @param FilePath The full path string for existing VPD PCD info file.
|
||||||
|
def Read(self, FilePath):
|
||||||
|
try:
|
||||||
|
fd = open(FilePath, "r")
|
||||||
|
except:
|
||||||
|
EdkLogger.error("VpdInfoFile",
|
||||||
|
BuildToolError.FILE_OPEN_FAILURE,
|
||||||
|
"Fail to open file %s for written." % FilePath)
|
||||||
|
Lines = fd.readlines()
|
||||||
|
for Line in Lines:
|
||||||
|
Line = Line.strip()
|
||||||
|
if len(Line) == 0 or Line.startswith("#"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
#
|
||||||
|
# the line must follow output format defined in BPDG spec.
|
||||||
|
#
|
||||||
|
try:
|
||||||
|
PcdName, Offset, Size, Value = Line.split("#")[0].split("|")
|
||||||
|
TokenSpaceName, PcdTokenName = PcdName.split(".")
|
||||||
|
except:
|
||||||
|
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
|
||||||
|
|
||||||
|
Found = False
|
||||||
|
for VpdObject in self._VpdArray.keys():
|
||||||
|
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip():
|
||||||
|
if self._VpdArray[VpdObject][0] == "*":
|
||||||
|
if Offset == "*":
|
||||||
|
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
|
||||||
|
|
||||||
|
self._VpdArray[VpdObject][0] = Offset
|
||||||
|
Found = True
|
||||||
|
break
|
||||||
|
if not Found:
|
||||||
|
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
|
||||||
|
|
||||||
|
## Get count of VPD PCD collected from platform's autogen when building.
|
||||||
|
#
|
||||||
|
# @return The integer count value
|
||||||
|
def GetCount(self):
|
||||||
|
Count = 0
|
||||||
|
for OffsetList in self._VpdArray.values():
|
||||||
|
Count += len(OffsetList)
|
||||||
|
|
||||||
|
return Count
|
||||||
|
|
||||||
|
## Get an offset value for a given VPD PCD
|
||||||
|
#
|
||||||
|
# Because BPDG only support one Sku, so only return offset for SKU default.
|
||||||
|
#
|
||||||
|
# @param vpd A given VPD PCD
|
||||||
|
def GetOffset(self, vpd):
|
||||||
|
if not self._VpdArray.has_key(vpd):
|
||||||
|
return None
|
||||||
|
|
||||||
|
if len(self._VpdArray[vpd]) == 0:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return self._VpdArray[vpd]
|
||||||
|
|
||||||
|
## Call external BPDG tool to process VPD file
|
||||||
|
#
|
||||||
|
# @param ToolPath The string path name for BPDG tool
|
||||||
|
# @param VpdFileName The string path name for VPD information guid.txt
|
||||||
|
#
|
||||||
|
def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):
|
||||||
|
assert ToolPath != None, "Invalid parameter ToolPath"
|
||||||
|
assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
|
||||||
|
|
||||||
|
OutputDir = os.path.dirname(VpdFilePath)
|
||||||
|
if (VpdFileName == None or VpdFileName == "") :
|
||||||
|
FileName = os.path.basename(VpdFilePath)
|
||||||
|
BaseName, ext = os.path.splitext(FileName)
|
||||||
|
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
|
||||||
|
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
|
||||||
|
else :
|
||||||
|
OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)
|
||||||
|
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)
|
||||||
|
|
||||||
|
try:
|
||||||
|
PopenObject = subprocess.Popen([ToolPath,
|
||||||
|
'-o', OutputBinFileName,
|
||||||
|
'-m', OutputMapFileName,
|
||||||
|
'-s',
|
||||||
|
'-f',
|
||||||
|
'-v',
|
||||||
|
VpdFilePath],
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr= subprocess.PIPE)
|
||||||
|
except Exception, X:
|
||||||
|
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
|
||||||
|
(out, error) = PopenObject.communicate()
|
||||||
|
print out
|
||||||
|
while PopenObject.returncode == None :
|
||||||
|
PopenObject.wait()
|
||||||
|
|
||||||
|
if PopenObject.returncode != 0:
|
||||||
|
if PopenObject.returncode != 0:
|
||||||
|
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
|
||||||
|
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
|
||||||
|
(PopenObject.returncode, str(error)))
|
||||||
|
|
||||||
|
return PopenObject.returncode
|
|
@ -29,6 +29,7 @@ MODEL_FILE_DSC = 1013
|
||||||
MODEL_FILE_FDF = 1014
|
MODEL_FILE_FDF = 1014
|
||||||
MODEL_FILE_INC = 1015
|
MODEL_FILE_INC = 1015
|
||||||
MODEL_FILE_CIF = 1016
|
MODEL_FILE_CIF = 1016
|
||||||
|
MODEL_FILE_OTHERS = 1099
|
||||||
|
|
||||||
MODEL_IDENTIFIER_FILE_HEADER = 2001
|
MODEL_IDENTIFIER_FILE_HEADER = 2001
|
||||||
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
|
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
|
||||||
|
@ -91,6 +92,8 @@ MODEL_META_DATA_NMAKE = 5012
|
||||||
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
|
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
|
||||||
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
|
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
|
||||||
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
|
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
|
||||||
|
MODEL_META_DATA_COMMENT = 5016
|
||||||
|
MODEL_META_DATA_GLOBAL_DEFINE = 5017
|
||||||
|
|
||||||
MODEL_EXTERNAL_DEPENDENCY = 10000
|
MODEL_EXTERNAL_DEPENDENCY = 10000
|
||||||
|
|
||||||
|
@ -103,6 +106,8 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
|
||||||
('MODEL_FILE_DSC', MODEL_FILE_DSC),
|
('MODEL_FILE_DSC', MODEL_FILE_DSC),
|
||||||
('MODEL_FILE_FDF', MODEL_FILE_FDF),
|
('MODEL_FILE_FDF', MODEL_FILE_FDF),
|
||||||
('MODEL_FILE_INC', MODEL_FILE_INC),
|
('MODEL_FILE_INC', MODEL_FILE_INC),
|
||||||
|
('MODEL_FILE_CIF', MODEL_FILE_CIF),
|
||||||
|
('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
|
||||||
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
|
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
|
||||||
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
|
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
|
||||||
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
|
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
|
||||||
|
@ -159,16 +164,17 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
|
||||||
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
|
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
|
||||||
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
|
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
|
||||||
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
|
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
|
||||||
('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)
|
('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
|
||||||
|
('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
|
||||||
]
|
]
|
||||||
|
|
||||||
## FunctionClass
|
## FunctionClass
|
||||||
#
|
#
|
||||||
# This class defines a structure of a function
|
# This class defines a structure of a function
|
||||||
#
|
#
|
||||||
# @param ID: ID of a Function
|
# @param ID: ID of a Function
|
||||||
# @param Header: Header of a Function
|
# @param Header: Header of a Function
|
||||||
# @param Modifier: Modifier of a Function
|
# @param Modifier: Modifier of a Function
|
||||||
# @param Name: Name of a Function
|
# @param Name: Name of a Function
|
||||||
# @param ReturnStatement: ReturnStatement of a Funciont
|
# @param ReturnStatement: ReturnStatement of a Funciont
|
||||||
# @param StartLine: StartLine of a Function
|
# @param StartLine: StartLine of a Function
|
||||||
|
@ -183,7 +189,7 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
|
||||||
#
|
#
|
||||||
# @var ID: ID of a Function
|
# @var ID: ID of a Function
|
||||||
# @var Header: Header of a Function
|
# @var Header: Header of a Function
|
||||||
# @var Modifier: Modifier of a Function
|
# @var Modifier: Modifier of a Function
|
||||||
# @var Name: Name of a Function
|
# @var Name: Name of a Function
|
||||||
# @var ReturnStatement: ReturnStatement of a Funciont
|
# @var ReturnStatement: ReturnStatement of a Funciont
|
||||||
# @var StartLine: StartLine of a Function
|
# @var StartLine: StartLine of a Function
|
||||||
|
@ -204,7 +210,7 @@ class FunctionClass(object):
|
||||||
FunNameStartLine = -1, FunNameStartColumn = -1):
|
FunNameStartLine = -1, FunNameStartColumn = -1):
|
||||||
self.ID = ID
|
self.ID = ID
|
||||||
self.Header = Header
|
self.Header = Header
|
||||||
self.Modifier = Modifier
|
self.Modifier = Modifier
|
||||||
self.Name = Name
|
self.Name = Name
|
||||||
self.ReturnStatement = ReturnStatement
|
self.ReturnStatement = ReturnStatement
|
||||||
self.StartLine = StartLine
|
self.StartLine = StartLine
|
||||||
|
@ -216,14 +222,14 @@ class FunctionClass(object):
|
||||||
self.BelongsToFile = BelongsToFile
|
self.BelongsToFile = BelongsToFile
|
||||||
self.FunNameStartLine = FunNameStartLine
|
self.FunNameStartLine = FunNameStartLine
|
||||||
self.FunNameStartColumn = FunNameStartColumn
|
self.FunNameStartColumn = FunNameStartColumn
|
||||||
|
|
||||||
self.IdentifierList = IdentifierList
|
self.IdentifierList = IdentifierList
|
||||||
self.PcdList = PcdList
|
self.PcdList = PcdList
|
||||||
|
|
||||||
## IdentifierClass
|
## IdentifierClass
|
||||||
#
|
#
|
||||||
# This class defines a structure of a variable
|
# This class defines a structure of a variable
|
||||||
#
|
#
|
||||||
# @param ID: ID of a Identifier
|
# @param ID: ID of a Identifier
|
||||||
# @param Modifier: Modifier of a Identifier
|
# @param Modifier: Modifier of a Identifier
|
||||||
# @param Type: Type of a Identifier
|
# @param Type: Type of a Identifier
|
||||||
|
@ -269,7 +275,7 @@ class IdentifierClass(object):
|
||||||
## PcdClass
|
## PcdClass
|
||||||
#
|
#
|
||||||
# This class defines a structure of a Pcd
|
# This class defines a structure of a Pcd
|
||||||
#
|
#
|
||||||
# @param ID: ID of a Pcd
|
# @param ID: ID of a Pcd
|
||||||
# @param CName: CName of a Pcd
|
# @param CName: CName of a Pcd
|
||||||
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
|
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
|
||||||
|
@ -314,7 +320,7 @@ class PcdDataClass(object):
|
||||||
## FileClass
|
## FileClass
|
||||||
#
|
#
|
||||||
# This class defines a structure of a file
|
# This class defines a structure of a file
|
||||||
#
|
#
|
||||||
# @param ID: ID of a File
|
# @param ID: ID of a File
|
||||||
# @param Name: Name of a File
|
# @param Name: Name of a File
|
||||||
# @param ExtName: ExtName of a File
|
# @param ExtName: ExtName of a File
|
||||||
|
@ -340,14 +346,14 @@ class PcdDataClass(object):
|
||||||
class FileClass(object):
|
class FileClass(object):
|
||||||
def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
|
def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
|
||||||
FunctionList = [], IdentifierList = [], PcdList = []):
|
FunctionList = [], IdentifierList = [], PcdList = []):
|
||||||
self.ID = ID
|
self.ID = ID
|
||||||
self.Name = Name
|
self.Name = Name
|
||||||
self.ExtName = ExtName
|
self.ExtName = ExtName
|
||||||
self.Path = Path
|
self.Path = Path
|
||||||
self.FullPath = FullPath
|
self.FullPath = FullPath
|
||||||
self.Model = Model
|
self.Model = Model
|
||||||
self.TimeStamp = TimeStamp
|
self.TimeStamp = TimeStamp
|
||||||
|
|
||||||
self.FunctionList = FunctionList
|
self.FunctionList = FunctionList
|
||||||
self.IdentifierList = IdentifierList
|
self.IdentifierList = IdentifierList
|
||||||
self.PcdList = PcdList
|
self.PcdList = PcdList
|
||||||
|
|
|
@ -30,6 +30,7 @@ class Check(object):
|
||||||
|
|
||||||
# Check all required checkpoints
|
# Check all required checkpoints
|
||||||
def Check(self):
|
def Check(self):
|
||||||
|
self.GeneralCheck()
|
||||||
self.MetaDataFileCheck()
|
self.MetaDataFileCheck()
|
||||||
self.DoxygenCheck()
|
self.DoxygenCheck()
|
||||||
self.IncludeFileCheck()
|
self.IncludeFileCheck()
|
||||||
|
@ -38,6 +39,29 @@ class Check(object):
|
||||||
self.FunctionLayoutCheck()
|
self.FunctionLayoutCheck()
|
||||||
self.NamingConventionCheck()
|
self.NamingConventionCheck()
|
||||||
|
|
||||||
|
# General Checking
|
||||||
|
def GeneralCheck(self):
|
||||||
|
self.GeneralCheckNonAcsii()
|
||||||
|
|
||||||
|
# Check whether file has non ACSII char
|
||||||
|
def GeneralCheckNonAcsii(self):
|
||||||
|
if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
|
||||||
|
EdkLogger.quiet("Checking Non-ACSII char in file ...")
|
||||||
|
SqlCommand = """select ID, FullPath, ExtName from File"""
|
||||||
|
RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
|
||||||
|
for Record in RecordSet:
|
||||||
|
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
|
||||||
|
op = open(Record[1]).readlines()
|
||||||
|
IndexOfLine = 0
|
||||||
|
for Line in op:
|
||||||
|
IndexOfLine += 1
|
||||||
|
IndexOfChar = 0
|
||||||
|
for Char in Line:
|
||||||
|
IndexOfChar += 1
|
||||||
|
if ord(Char) > 126:
|
||||||
|
OtherMsg = "File %s has Non-ASCII char at line %s column %s" %(Record[1], IndexOfLine, IndexOfChar)
|
||||||
|
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg = OtherMsg, BelongsToTable = 'File', BelongsToItem = Record[0])
|
||||||
|
|
||||||
# C Function Layout Checking
|
# C Function Layout Checking
|
||||||
def FunctionLayoutCheck(self):
|
def FunctionLayoutCheck(self):
|
||||||
self.FunctionLayoutCheckReturnType()
|
self.FunctionLayoutCheckReturnType()
|
||||||
|
@@ -67,22 +91,26 @@ class Check(object):
         if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout return type ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c', '.h'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckFuncLayoutReturnType(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c', '.h'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckFuncLayoutReturnType(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                c.CheckFuncLayoutReturnType(FullName)

     # Check whether any optional functional modifiers exist and next to the return type
     def FunctionLayoutCheckModifier(self):
         if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout modifier ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c', '.h'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckFuncLayoutModifier(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c', '.h'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckFuncLayoutModifier(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                c.CheckFuncLayoutModifier(FullName)

     # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
     # Check whether the closing parenthesis is on its own line and also indented two spaces
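The recurring change in this file swaps a fresh directory walk per checkpoint for the pre-collected gCFileList/gHFileList lists. A rough sketch of the two styles, assuming a plain directory tree (the FileCache class and its arguments are illustrative, not the tool's API):

import os

# Old style: rescan the tree on every checkpoint.
def collect_by_walking(root, exts=('.c', '.h')):
    result = []
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            if os.path.splitext(name)[1] in exts:
                result.append(os.path.join(dirpath, name))
    return result

# New style: build the lists once, then let every checkpoint reuse them.
class FileCache(object):
    def __init__(self, root):
        self.c_files = collect_by_walking(root, ('.c',))
        self.h_files = collect_by_walking(root, ('.h',))

cache = FileCache('.')                      # scan once
for full_name in cache.c_files + cache.h_files:
    pass                                    # each checkpoint just iterates the cached list
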
@@ -90,33 +118,41 @@ class Check(object):
         if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout function name ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c', '.h'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckFuncLayoutName(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c', '.h'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckFuncLayoutName(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                c.CheckFuncLayoutName(FullName)

     # Check whether the function prototypes in include files have the same form as function definitions
     def FunctionLayoutCheckPrototype(self):
         if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout function prototype ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[PROTOTYPE]" + FullName)
-                        c.CheckFuncLayoutPrototype(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[PROTOTYPE]" + FullName)
+            #             c.CheckFuncLayoutPrototype(FullName)
+            for FullName in EccGlobalData.gCFileList:
+                EdkLogger.quiet("[PROTOTYPE]" + FullName)
+                c.CheckFuncLayoutPrototype(FullName)

     # Check whether the body of a function is contained by open and close braces that must be in the first column
     def FunctionLayoutCheckBody(self):
         if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout function body ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckFuncLayoutBody(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckFuncLayoutBody(FullName)
+            for FullName in EccGlobalData.gCFileList:
+                c.CheckFuncLayoutBody(FullName)

     # Check whether the data declarations is the first code in a module.
     # self.CFunctionLayoutCheckDataDeclaration = 1
@@ -125,11 +161,14 @@ class Check(object):
         if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking function layout local variables ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckFuncLayoutLocalVariable(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckFuncLayoutLocalVariable(FullName)
+
+            for FullName in EccGlobalData.gCFileList:
+                c.CheckFuncLayoutLocalVariable(FullName)

     # Check whether no use of STATIC for functions
     # self.CFunctionLayoutCheckNoStatic = 1
@@ -150,22 +189,26 @@ class Check(object):
         if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Declaration No use C type ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckDeclNoUseCType(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckDeclNoUseCType(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                c.CheckDeclNoUseCType(FullName)

     # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
     def DeclCheckInOutModifier(self):
         if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Declaration argument modifier ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        c.CheckDeclArgModifier(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             c.CheckDeclArgModifier(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                c.CheckDeclArgModifier(FullName)

     # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
     def DeclCheckEFIAPIModifier(self):
@@ -177,24 +220,30 @@ class Check(object):
         if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Declaration enum typedef ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[ENUM]" + FullName)
-                        c.CheckDeclEnumTypedef(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[ENUM]" + FullName)
+            #             c.CheckDeclEnumTypedef(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                EdkLogger.quiet("[ENUM]" + FullName)
+                c.CheckDeclEnumTypedef(FullName)

     # Check whether Structure Type has a 'typedef' and the name is capital
     def DeclCheckStructureDeclaration(self):
         if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Declaration struct typedef ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[STRUCT]" + FullName)
-                        c.CheckDeclStructTypedef(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[STRUCT]" + FullName)
+            #             c.CheckDeclStructTypedef(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                EdkLogger.quiet("[STRUCT]" + FullName)
+                c.CheckDeclStructTypedef(FullName)

     # Check whether having same Structure
     def DeclCheckSameStructure(self):
@@ -223,12 +272,15 @@ class Check(object):
         if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Declaration union typedef ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[UNION]" + FullName)
-                        c.CheckDeclUnionTypedef(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[UNION]" + FullName)
+            #             c.CheckDeclUnionTypedef(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                EdkLogger.quiet("[UNION]" + FullName)
+                c.CheckDeclUnionTypedef(FullName)

     # Predicate Expression Checking
     def PredicateExpressionCheck(self):
@@ -241,35 +293,46 @@ class Check(object):
         if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking predicate expression Boolean value ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[BOOLEAN]" + FullName)
-                        c.CheckBooleanValueComparison(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[BOOLEAN]" + FullName)
+            #             c.CheckBooleanValueComparison(FullName)
+            for FullName in EccGlobalData.gCFileList:
+                EdkLogger.quiet("[BOOLEAN]" + FullName)
+                c.CheckBooleanValueComparison(FullName)

     # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
     def PredicateExpressionCheckNonBooleanOperator(self):
         if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
-                        c.CheckNonBooleanValueComparison(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+            #             c.CheckNonBooleanValueComparison(FullName)
+            for FullName in EccGlobalData.gCFileList:
+                EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+                c.CheckNonBooleanValueComparison(FullName)

     # Check whether a comparison of any pointer to zero must be done via the NULL type
     def PredicateExpressionCheckComparisonNullType(self):
         if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking predicate expression NULL pointer ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        EdkLogger.quiet("[POINTER]" + FullName)
-                        c.CheckPointerNullComparison(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             EdkLogger.quiet("[POINTER]" + FullName)
+            #             c.CheckPointerNullComparison(FullName)
+            for FullName in EccGlobalData.gCFileList:
+                EdkLogger.quiet("[POINTER]" + FullName)
+                c.CheckPointerNullComparison(FullName)

     # Include file checking
     def IncludeFileCheck(self):
         self.IncludeFileCheckIfndef()
@@ -309,22 +372,26 @@ class Check(object):
         if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking header file ifndef ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h'):
-                        FullName = os.path.join(Dirpath, F)
-                        MsgList = c.CheckHeaderFileIfndef(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             MsgList = c.CheckHeaderFileIfndef(FullName)
+            for FullName in EccGlobalData.gHFileList:
+                MsgList = c.CheckHeaderFileIfndef(FullName)

     # Check whether include files NOT contain code or define data variables
     def IncludeFileCheckData(self):
         if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking header file data ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h'):
-                        FullName = os.path.join(Dirpath, F)
-                        MsgList = c.CheckHeaderFileData(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             MsgList = c.CheckHeaderFileData(FullName)
+            for FullName in EccGlobalData.gHFileList:
+                MsgList = c.CheckHeaderFileData(FullName)

     # Doxygen document checking
     def DoxygenCheck(self):
@@ -347,24 +414,28 @@ class Check(object):
                         MsgList = c.CheckFileHeaderDoxygenComments(FullName)
                     elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
                         FullName = os.path.join(Dirpath, F)
-                        if not open(FullName).read().startswith('## @file'):
+                        op = open(FullName).readlines()
+                        if not op[0].startswith('## @file') and op[6].startswith('## @file') and op[7].startswith('## @file'):
                             SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
                             ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
                             for Result in ResultSet:
                                 Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file""'
                                 EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])


     # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
     def DoxygenCheckFunctionHeader(self):
         if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Doxygen function header ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                MsgList = c.CheckFuncHeaderDoxygenComments(FullName)


     # Check whether the first line of text in a comment block is a brief description of the element being documented.
     # The brief description must end with a period.
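The file-header change above stops reading the whole INF/DEC/DSC/FDF file and instead reads it line by line, looking near the top for the '## @file' marker. A simplified sketch of that kind of check; it scans the first few lines with a single test rather than reproducing the exact index expression used in the hunk:

def missing_file_header(path, marker='## @file', search_lines=8):
    # Report True when none of the first few lines carries the expected marker.
    with open(path) as f:
        head = [f.readline() for _ in range(search_lines)]
    return not any(line.startswith(marker) for line in head)

if __name__ == '__main__':
    if missing_file_header('Example.dsc'):   # hypothetical meta-data file
        print('INF/DEC/DSC/FDF file header comment should begin with "## @file"')
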
@@ -377,22 +448,26 @@ class Check(object):
         if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Doxygen comment ///< ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                MsgList = c.CheckDoxygenTripleForwardSlash(FullName)

     # Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
     def DoxygenCheckCommand(self):
         if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
             EdkLogger.quiet("Checking Doxygen command ...")

-            for Dirpath, Dirnames, Filenames in self.WalkTree():
-                for F in Filenames:
-                    if os.path.splitext(F)[1] in ('.h', '.c'):
-                        FullName = os.path.join(Dirpath, F)
-                        MsgList = c.CheckDoxygenCommand(FullName)
+            # for Dirpath, Dirnames, Filenames in self.WalkTree():
+            #     for F in Filenames:
+            #         if os.path.splitext(F)[1] in ('.h', '.c'):
+            #             FullName = os.path.join(Dirpath, F)
+            #             MsgList = c.CheckDoxygenCommand(FullName)
+            for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+                MsgList = c.CheckDoxygenCommand(FullName)

     # Meta-Data File Processing Checking
     def MetaDataFileCheck(self):
@@ -556,7 +631,6 @@ class Check(object):
                     SqlCommand2 = """select Name from File where ID = %s""" %Record[5]
                     DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]
                     FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]
-                    print DscFileName, 111, FdfFileName
                     if DscFileName != FdfFileName:
                         continue
                     if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):
@@ -680,8 +754,8 @@ class Check(object):
             SqlCommand = """
                          select ID from File where FullPath in
                         (select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
-                         and B.ID = %s)
-                         """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile)
+                         and B.ID = %s and (B.Model = %s or B.Model = %s))
+                         """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)
             TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
             for Tbl in TableSet:
                 TblName = 'Identifier' + str(Tbl[0])
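The SQL tweak above adds a Model filter so that only C source and header rows come back from the File table. A self-contained sqlite3 sketch of the same shape of query, with a stand-in schema and made-up model constants:

import sqlite3

MODEL_FILE_C, MODEL_FILE_H, MODEL_FILE_INF = 1001, 1002, 1011   # stand-in constants

conn = sqlite3.connect(':memory:')
conn.execute("create table File (ID integer, FullPath text, Model integer)")
conn.executemany("insert into File values (?, ?, ?)",
                 [(1, 'Drv/Drv.c', MODEL_FILE_C),
                  (2, 'Drv/Drv.h', MODEL_FILE_H),
                  (3, 'Drv/Drv.inf', MODEL_FILE_INF)])

# Only C and H files survive the extra Model filter, mirroring the new query.
rows = conn.execute("select ID from File where Model = ? or Model = ?",
                    (MODEL_FILE_C, MODEL_FILE_H)).fetchall()
print(rows)   # [(1,), (2,)]
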
@@ -714,7 +788,7 @@ class Check(object):
         if Path.startswith('\\') or Path.startswith('/'):
             Path = Path[1:]
         return Path

     # Check whether two module INFs under one workspace has the same FILE_GUID value
     def MetaDataFileCheckModuleFileGuidDuplication(self):
         if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':

@@ -733,7 +807,7 @@ class Check(object):
                     if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):
                         Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)
                         EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg = Msg, BelongsToTable = Table.Table, BelongsToItem = Record[0])


     # Check whether these is duplicate Guid/Ppi/Protocol name
     def CheckGuidProtocolPpi(self, ErrorID, Model, Table):

@@ -28,7 +28,7 @@ from Common.String import *
 class Configuration(object):
     def __init__(self, Filename):
         self.Filename = Filename

         self.Version = 0.1

         ## Identify to if check all items

@@ -49,14 +49,14 @@ class Configuration(object):
         # SpaceCheckAll
         #
         self.AutoCorrect = 0

         # List customized Modifer here, split with ','
         # Defaultly use the definition in class DataType
         self.ModifierList = MODIFIER_LIST

         ## General Checking
         self.GeneralCheckAll = 0

         # Check whether NO Tab is used, replaced with spaces
         self.GeneralCheckNoTab = 1
         # The width of Tab
@@ -77,31 +77,33 @@ class Configuration(object):
         self.GeneralCheckCarriageReturn = 1
         # Check whether the file exists
         self.GeneralCheckFileExistence = 1
+        # Check whether file has non ACSII char
+        self.GeneralCheckNonAcsii = 1

         ## Space Checking
         self.SpaceCheckAll = 1

         ## Predicate Expression Checking
         self.PredicateExpressionCheckAll = 0

         # Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
         self.PredicateExpressionCheckBooleanValue = 1
         # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
         self.PredicateExpressionCheckNonBooleanOperator = 1
         # Check whether a comparison of any pointer to zero must be done via the NULL type
         self.PredicateExpressionCheckComparisonNullType = 1

         ## Headers Checking
         self.HeaderCheckAll = 0

         # Check whether File header exists
         self.HeaderCheckFile = 1
         # Check whether Function header exists
         self.HeaderCheckFunction = 1

         ## C Function Layout Checking
         self.CFunctionLayoutCheckAll = 0

         # Check whether return type exists and in the first line
         self.CFunctionLayoutCheckReturnType = 1
         # Check whether any optional functional modifiers exist and next to the return type
@@ -119,10 +121,10 @@ class Configuration(object):
         self.CFunctionLayoutCheckNoInitOfVariable = 1
         # Check whether no use of STATIC for functions
         self.CFunctionLayoutCheckNoStatic = 1

         ## Include Files Checking
         self.IncludeFileCheckAll = 0

         #Check whether having include files with same name
         self.IncludeFileCheckSameName = 1
         # Check whether all include file contents is guarded by a #ifndef statement.

@@ -132,10 +134,10 @@ class Configuration(object):
         # Check whether include files contain only public or only private data
         # Check whether include files NOT contain code or define data variables
         self.IncludeFileCheckData = 1

         ## Declarations and Data Types Checking
         self.DeclarationDataTypeCheckAll = 0

         # Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
         self.DeclarationDataTypeCheckNoUseCType = 1
         # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration

@@ -150,10 +152,10 @@ class Configuration(object):
         self.DeclarationDataTypeCheckSameStructure = 1
         # Check whether Union Type has a 'typedef' and the name is capital
         self.DeclarationDataTypeCheckUnionType = 1

         ## Naming Conventions Checking
         self.NamingConventionCheckAll = 0

         # Check whether only capital letters are used for #define declarations
         self.NamingConventionCheckDefineStatement = 1
         # Check whether only capital letters are used for typedef declarations

@@ -172,33 +174,33 @@ class Configuration(object):
         self.NamingConventionCheckFunctionName = 1
         # Check whether NO use short variable name with single character
         self.NamingConventionCheckSingleCharacterVariable = 1

         ## Doxygen Checking
         self.DoxygenCheckAll = 0

         # Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
         self.DoxygenCheckFileHeader = 1
         # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
         self.DoxygenCheckFunctionHeader = 1
         # Check whether the first line of text in a comment block is a brief description of the element being documented.
         # The brief description must end with a period.
         self.DoxygenCheckCommentDescription = 1
         # Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
         self.DoxygenCheckCommentFormat = 1
         # Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
         self.DoxygenCheckCommand = 1

         ## Meta-Data File Processing Checking
         self.MetaDataFileCheckAll = 0

         # Check whether each file defined in meta-data exists
         self.MetaDataFileCheckPathName = 1
         # Generate a list for all files defined in meta-data files
         self.MetaDataFileCheckGenerateFileList = 1
         # The path of log file
         self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
         # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
         # Each Library Instance must specify the Supported Module Types in its INF file,
         # and any module specifying the library instance must be one of the supported types.
         self.MetaDataFileCheckLibraryInstance = 1
         # Check whether a Library Instance has been defined for all dependent library classes
@@ -235,14 +237,17 @@ class Configuration(object):
         # The directory listed here will not be parsed, split with ','
         self.SkipDirList = []

+        # A list for binary file ext name
+        self.BinaryExtList = []
+
         self.ParseConfig()

     def ParseConfig(self):
         Filepath = os.path.normpath(self.Filename)
         if not os.path.isfile(Filepath):
             ErrorMsg = "Can't find configuration file '%s'" % Filepath
             EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)

         LineNo = 0
         for Line in open(Filepath, 'r'):
             LineNo = LineNo + 1
@@ -258,8 +263,10 @@ class Configuration(object):
                 continue
             if List[0] == 'SkipDirList':
                 List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
+            if List[0] == 'BinaryExtList':
+                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
             self.__dict__[List[0]] = List[1]

     def ShowMe(self):
         print self.Filename
         for Key in self.__dict__.keys():

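ParseConfig now treats BinaryExtList exactly like SkipDirList: the raw value is split on commas before being stored on the configuration object. A tiny sketch of that parsing step, using a hypothetical helper in place of GetSplitValueList:

def split_value_list(value, sep=','):
    # Stand-in for GetSplitValueList: split on the separator and strip each element.
    return [item.strip() for item in value.split(sep) if item.strip()]

config = {}
for key, raw in [('SkipDirList', 'Bin, .svn'), ('BinaryExtList', '.EXE, .EFI, .FD')]:
    if key in ('SkipDirList', 'BinaryExtList'):
        config[key] = split_value_list(raw)
print(config['BinaryExtList'])   # ['.EXE', '.EFI', '.FD']
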
@@ -41,7 +41,7 @@ DATABASE_PATH = "Ecc.db"
 # This class defined the ECC databse
 # During the phase of initialization, the database will create all tables and
 # insert all records of table DataModel
 #
 # @param object: Inherited from object class
 # @param DbPath: A string for the path of the ECC database
 #

@@ -64,7 +64,7 @@ class Database(object):
         self.TblDec = None
         self.TblDsc = None
         self.TblFdf = None

     ## Initialize ECC database
     #
     # 1. Delete all old existing tables

@@ -85,7 +85,7 @@ class Database(object):
         # to avoid non-ascii charater conversion error
         self.Conn.text_factory = str
         self.Cur = self.Conn.cursor()

         self.TblDataModel = TableDataModel(self.Cur)
         self.TblFile = TableFile(self.Cur)
         self.TblFunction = TableFunction(self.Cur)

@@ -96,7 +96,7 @@ class Database(object):
         self.TblDec = TableDec(self.Cur)
         self.TblDsc = TableDsc(self.Cur)
         self.TblFdf = TableFdf(self.Cur)

         #
         # Create new tables
         #

@@ -110,7 +110,7 @@ class Database(object):
         self.TblDec.Create()
         self.TblDsc.Create()
         self.TblFdf.Create()

         #
         # Init each table's ID
         #

@@ -123,13 +123,13 @@ class Database(object):
         self.TblDec.InitID()
         self.TblDsc.InitID()
         self.TblFdf.InitID()

         #
         # Initialize table DataModel
         #
         if NewDatabase:
             self.TblDataModel.InitTable()

         EdkLogger.verbose("Initialize ECC database ... DONE!")

     ## Query a table

@@ -138,7 +138,7 @@ class Database(object):
     #
     def QueryTable(self, Table):
         Table.Query()

     ## Close entire database
     #
     # Commit all first

@@ -147,15 +147,15 @@ class Database(object):
     def Close(self):
         #
         # Commit to file
         #
         self.Conn.commit()

         #
         # Close connection and cursor
         #
         self.Cur.close()
         self.Conn.close()

     ## Insert one file information
     #
     # Insert one file's information to the database
@@ -171,43 +171,44 @@ class Database(object):
         # Insert a record for file
         #
         FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
-        IdTable = TableIdentifier(self.Cur)
-        IdTable.Table = "Identifier%s" % FileID
-        IdTable.Create()
-
-        #
-        # Insert function of file
-        #
-        for Function in File.FunctionList:
-            FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
-                                                 Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
-                                                 Function.BodyStartLine, Function.BodyStartColumn, FileID, \
-                                                 Function.FunNameStartLine, Function.FunNameStartColumn)
-            #
-            # Insert Identifier of function
-            #
-            for Identifier in Function.IdentifierList:
-                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
-                                              FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
-            #
-            # Insert Pcd of function
-            #
-            for Pcd in Function.PcdList:
-                PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
-                                           FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
-        #
-        # Insert Identifier of file
-        #
-        for Identifier in File.IdentifierList:
-            IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
-                                          FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
-        #
-        # Insert Pcd of file
-        #
-        for Pcd in File.PcdList:
-            PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
-                                       FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
+        if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
+            IdTable = TableIdentifier(self.Cur)
+            IdTable.Table = "Identifier%s" % FileID
+            IdTable.Create()
+            #
+            # Insert function of file
+            #
+            for Function in File.FunctionList:
+                FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
+                                                     Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
+                                                     Function.BodyStartLine, Function.BodyStartColumn, FileID, \
+                                                     Function.FunNameStartLine, Function.FunNameStartColumn)
+                #
+                # Insert Identifier of function
+                #
+                for Identifier in Function.IdentifierList:
+                    IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+                                                  FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+                #
+                # Insert Pcd of function
+                #
+                for Pcd in Function.PcdList:
+                    PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
+                                               FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
+            #
+            # Insert Identifier of file
+            #
+            for Identifier in File.IdentifierList:
+                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+                                              FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+            #
+            # Insert Pcd of file
+            #
+            for Pcd in File.PcdList:
+                PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
+                                           FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)

         EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)

     ## UpdateIdentifierBelongsToFunction
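The InsertOneFile rework above creates the per-file Identifier table and stores functions, identifiers and PCDs only when the file model is C source or header; every other file type just gets its File record. A compressed, dictionary-based sketch of that control flow (the structures and constants below are stand-ins, not the tool's real classes):

MODEL_FILE_C, MODEL_FILE_H = 1001, 1002        # stand-in model constants

def insert_one_file(db, source_file):
    # The File row is always written; the per-file Identifier table only for C/H files.
    db['File'].append(source_file['FullPath'])
    file_id = len(db['File']) - 1
    if source_file['Model'] in (MODEL_FILE_C, MODEL_FILE_H):
        table_name = 'Identifier%s' % file_id          # one table per parsed file
        db[table_name] = list(source_file.get('IdentifierList', []))
    return file_id

db = {'File': []}
insert_one_file(db, {'FullPath': 'Drv.c', 'Model': MODEL_FILE_C, 'IdentifierList': ['gVar']})
insert_one_file(db, {'FullPath': 'Drv.inf', 'Model': 1011})    # no Identifier table created
print(sorted(db.keys()))   # ['File', 'Identifier0']
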
@ -217,7 +218,7 @@ class Database(object):
|
||||||
#
|
#
|
||||||
def UpdateIdentifierBelongsToFunction_disabled(self):
|
def UpdateIdentifierBelongsToFunction_disabled(self):
|
||||||
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
|
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
|
||||||
|
|
||||||
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
|
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
|
||||||
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
||||||
self.Cur.execute(SqlCommand)
|
self.Cur.execute(SqlCommand)
|
||||||
|
@ -233,7 +234,7 @@ class Database(object):
|
||||||
# Check whether an identifier belongs to a function
|
# Check whether an identifier belongs to a function
|
||||||
#
|
#
|
||||||
EdkLogger.debug(4, "For common identifiers ... ")
|
EdkLogger.debug(4, "For common identifiers ... ")
|
||||||
SqlCommand = """select ID from Function
|
SqlCommand = """select ID from Function
|
||||||
where StartLine < %s and EndLine > %s
|
where StartLine < %s and EndLine > %s
|
||||||
and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
|
and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
|
||||||
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
||||||
|
@ -243,13 +244,13 @@ class Database(object):
|
||||||
SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
|
SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
|
||||||
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
||||||
self.Cur.execute(SqlCommand)
|
self.Cur.execute(SqlCommand)
|
||||||
|
|
||||||
#
|
#
|
||||||
# Check whether the identifier is a function header
|
# Check whether the identifier is a function header
|
||||||
#
|
#
|
||||||
EdkLogger.debug(4, "For function headers ... ")
|
EdkLogger.debug(4, "For function headers ... ")
|
||||||
if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
|
if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
|
||||||
SqlCommand = """select ID from Function
|
SqlCommand = """select ID from Function
|
||||||
where StartLine = %s + 1
|
where StartLine = %s + 1
|
||||||
and BelongsToFile = %s""" % (EndLine, BelongsToFile)
|
and BelongsToFile = %s""" % (EndLine, BelongsToFile)
|
||||||
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
||||||
|
@ -259,7 +260,7 @@ class Database(object):
|
||||||
SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
|
SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
|
||||||
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
|
||||||
self.Cur.execute(SqlCommand)
|
self.Cur.execute(SqlCommand)
|
||||||
|
|
||||||
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
|
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
|
||||||
|
|
||||||
|
|
||||||
|
@ -270,7 +271,7 @@ class Database(object):
|
||||||
#
|
#
|
||||||
def UpdateIdentifierBelongsToFunction(self):
|
def UpdateIdentifierBelongsToFunction(self):
|
||||||
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
|
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
|
||||||
|
|
||||||
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
|
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
|
||||||
Records = self.TblFunction.Exec(SqlCommand)
|
Records = self.TblFunction.Exec(SqlCommand)
|
||||||
Data1 = []
|
Data1 = []
|
||||||
|
@ -308,7 +309,7 @@ class Database(object):
|
||||||
# self.Cur.executemany(SqlCommand, Data2)
|
# self.Cur.executemany(SqlCommand, Data2)
|
||||||
#
|
#
|
||||||
# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
|
# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
|
||||||
|
|
||||||
|
|
||||||
##
|
##
|
||||||
#
|
#
|
||||||
|
@ -320,11 +321,11 @@ if __name__ == '__main__':
|
||||||
#EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
#EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||||
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
|
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
|
||||||
EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))
|
EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))
|
||||||
|
|
||||||
Db = Database(DATABASE_PATH)
|
Db = Database(DATABASE_PATH)
|
||||||
Db.InitDatabase()
|
Db.InitDatabase()
|
||||||
Db.QueryTable(Db.TblDataModel)
|
Db.QueryTable(Db.TblDataModel)
|
||||||
|
|
||||||
identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
|
identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
|
||||||
identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
|
identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
|
||||||
identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
|
identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
|
||||||
|
@ -333,12 +334,12 @@ if __name__ == '__main__':
|
||||||
file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
|
file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
|
||||||
Db.InsertOneFile(file)
|
Db.InsertOneFile(file)
|
||||||
Db.UpdateIdentifierBelongsToFunction()
|
Db.UpdateIdentifierBelongsToFunction()
|
||||||
|
|
||||||
Db.QueryTable(Db.TblFile)
|
Db.QueryTable(Db.TblFile)
|
||||||
Db.QueryTable(Db.TblFunction)
|
Db.QueryTable(Db.TblFunction)
|
||||||
Db.QueryTable(Db.TblPcd)
|
Db.QueryTable(Db.TblPcd)
|
||||||
Db.QueryTable(Db.TblIdentifier)
|
Db.QueryTable(Db.TblIdentifier)
|
||||||
|
|
||||||
Db.Close()
|
Db.Close()
|
||||||
EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
|
EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
|
||||||
|
|
||||||
|
|
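Note on the hunks above: InsertOneFile and UpdateIdentifierBelongsToFunction attach each Identifier row to the Function whose StartLine/EndLine range encloses it, using plain UPDATE statements against the sqlite tables. A minimal, self-contained sketch of that association pattern follows (toy schema, not the tool's real tables):

import sqlite3

Conn = sqlite3.connect(':memory:')
Cur = Conn.cursor()
Cur.execute("create table Function (ID integer, BelongsToFile integer, StartLine integer, EndLine integer)")
Cur.execute("create table Identifier (ID integer, BelongsToFile integer, StartLine integer, BelongsToFunction integer)")
Cur.execute("insert into Function values (1, 7, 10, 40)")
Cur.execute("insert into Identifier values (100, 7, 15, -1)")

# Same idea as the "Update Identifier set BelongsToFunction = ..." statements above:
# pick the function whose line range contains the identifier in the same file.
Cur.execute("""update Identifier
               set BelongsToFunction = (
                   select F.ID from Function F
                   where F.BelongsToFile = Identifier.BelongsToFile
                     and F.StartLine < Identifier.StartLine
                     and F.EndLine > Identifier.StartLine)""")
print(Cur.execute("select BelongsToFunction from Identifier").fetchall())   # [(1,)]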
|
@ -106,6 +106,8 @@ class Ecc(object):
|
||||||
self.BuildMetaDataFileDatabase()
|
self.BuildMetaDataFileDatabase()
|
||||||
|
|
||||||
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
|
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
|
||||||
|
EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)
|
||||||
|
EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)
|
||||||
|
|
||||||
## BuildMetaDataFileDatabase
|
## BuildMetaDataFileDatabase
|
||||||
#
|
#
|
||||||
|
@ -227,7 +229,7 @@ class Ecc(object):
|
||||||
|
|
||||||
if Options.Workspace:
|
if Options.Workspace:
|
||||||
os.environ["WORKSPACE"] = Options.Workspace
|
os.environ["WORKSPACE"] = Options.Workspace
|
||||||
|
|
||||||
# Check workspace envirnoment
|
# Check workspace envirnoment
|
||||||
if "WORKSPACE" not in os.environ:
|
if "WORKSPACE" not in os.environ:
|
||||||
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
|
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
|
||||||
|
|
|
@ -21,4 +21,6 @@ gTarget = ''
|
||||||
gConfig = None
|
gConfig = None
|
||||||
gDb = None
|
gDb = None
|
||||||
gIdentifierTableList = []
|
gIdentifierTableList = []
|
||||||
|
gCFileList = []
|
||||||
|
gHFileList = []
|
||||||
gException = None
|
gException = None
|
|
@ -19,6 +19,7 @@ ERROR_GENERAL_CHECK_NO_ASM = 1004
|
||||||
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
|
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
|
||||||
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
|
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
|
||||||
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
|
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
|
||||||
|
ERROR_GENERAL_CHECK_NON_ACSII = 1008
|
||||||
|
|
||||||
ERROR_SPACE_CHECK_ALL = 2000
|
ERROR_SPACE_CHECK_ALL = 2000
|
||||||
|
|
||||||
|
@ -105,6 +106,7 @@ gEccErrorMessage = {
|
||||||
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
|
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
|
||||||
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
|
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
|
||||||
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
|
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
|
||||||
|
ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",
|
||||||
|
|
||||||
ERROR_SPACE_CHECK_ALL : "",
|
ERROR_SPACE_CHECK_ALL : "",
|
||||||
|
|
||||||
|
|
|
@ -26,7 +26,7 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
|
||||||
Filepath = os.path.normpath(Filepath)
|
Filepath = os.path.normpath(Filepath)
|
||||||
SqlCommand = """
|
SqlCommand = """
|
||||||
select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
|
select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
|
||||||
select distinct B.BelongsToFile from File as A left join Inf as B
|
select distinct B.BelongsToFile from File as A left join Inf as B
|
||||||
where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
|
where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
|
||||||
and Inf.BelongsToFile = File.ID""" \
|
and Inf.BelongsToFile = File.ID""" \
|
||||||
% (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
|
% (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
|
||||||
|
@ -36,7 +36,7 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
|
||||||
InfFullPath = os.path.normpath(os.path.join(WorkSpace, Record[1]))
|
InfFullPath = os.path.normpath(os.path.join(WorkSpace, Record[1]))
|
||||||
(DecPath, DecName) = os.path.split(DecFullPath)
|
(DecPath, DecName) = os.path.split(DecFullPath)
|
||||||
(InfPath, InfName) = os.path.split(InfFullPath)
|
(InfPath, InfName) = os.path.split(InfFullPath)
|
||||||
SqlCommand = """select Value1 from Dec where BelongsToFile =
|
SqlCommand = """select Value1 from Dec where BelongsToFile =
|
||||||
(select ID from File where FullPath = '%s') and Model = %s""" \
|
(select ID from File where FullPath = '%s') and Model = %s""" \
|
||||||
% (DecFullPath, MODEL_EFI_INCLUDE)
|
% (DecFullPath, MODEL_EFI_INCLUDE)
|
||||||
NewRecordSet = Db.TblDec.Exec(SqlCommand)
|
NewRecordSet = Db.TblDec.Exec(SqlCommand)
|
||||||
|
@ -46,9 +46,22 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
|
||||||
IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
|
IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
|
||||||
if IncludePath not in IncludeList:
|
if IncludePath not in IncludeList:
|
||||||
IncludeList.append(IncludePath)
|
IncludeList.append(IncludePath)
|
||||||
|
|
||||||
return IncludeList
|
return IncludeList
|
||||||
|
|
||||||
|
## Get the file list
|
||||||
|
#
|
||||||
|
# Search table file and find all specific type files
|
||||||
|
#
|
||||||
|
def GetFileList(FileModel, Db):
|
||||||
|
FileList = []
|
||||||
|
SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)
|
||||||
|
RecordSet = Db.TblFile.Exec(SqlCommand)
|
||||||
|
for Record in RecordSet:
|
||||||
|
FileList.append(Record[0])
|
||||||
|
|
||||||
|
return FileList
|
||||||
|
|
||||||
## Get the table list
|
## Get the table list
|
||||||
#
|
#
|
||||||
# Search table file and find all small tables
|
# Search table file and find all small tables
|
||||||
|
@ -60,6 +73,6 @@ def GetTableList(FileModelList, Table, Db):
|
||||||
for Record in RecordSet:
|
for Record in RecordSet:
|
||||||
TableName = Table + str(Record[0])
|
TableName = Table + str(Record[0])
|
||||||
TableList.append(TableName)
|
TableList.append(TableName)
|
||||||
|
|
||||||
return TableList
|
return TableList
|
||||||
|
|
||||||
|
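The added GetFileList helper is a one-query lookup of File.FullPath for a given file model; Ecc caches the results in EccGlobalData.gCFileList and gHFileList. A stand-alone sketch of the same query (table layout and model constants here are illustrative placeholders, not the tool's actual values):

import sqlite3

MODEL_FILE_C, MODEL_FILE_H = 1009, 1010     # placeholder model IDs for this sketch

Db = sqlite3.connect(':memory:')
Db.execute("create table File (FullPath text, Model integer)")
Db.executemany("insert into File values (?, ?)",
               [('MdePkg/Library/BaseLib/String.c', MODEL_FILE_C),
                ('MdePkg/Include/Base.h', MODEL_FILE_H)])

def GetFileList(FileModel, Db):
    # Collect every FullPath whose Model matches, as the new helper does.
    return [Row[0] for Row in Db.execute("select FullPath from File where Model = ?", (FileModel,))]

print(GetFileList(MODEL_FILE_C, Db))        # ['MdePkg/Library/BaseLib/String.c']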
|
|
@ -514,7 +514,9 @@ def CollectSourceCodeDataIntoDB(RootDir):
|
||||||
dirnames.append(Dirname)
|
dirnames.append(Dirname)
|
||||||
|
|
||||||
for f in filenames:
|
for f in filenames:
|
||||||
|
collector = None
|
||||||
FullName = os.path.normpath(os.path.join(dirpath, f))
|
FullName = os.path.normpath(os.path.join(dirpath, f))
|
||||||
|
model = DataClass.MODEL_FILE_OTHERS
|
||||||
if os.path.splitext(f)[1] in ('.h', '.c'):
|
if os.path.splitext(f)[1] in ('.h', '.c'):
|
||||||
EdkLogger.info("Parsing " + FullName)
|
EdkLogger.info("Parsing " + FullName)
|
||||||
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
|
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
|
||||||
|
@ -526,12 +528,13 @@ def CollectSourceCodeDataIntoDB(RootDir):
|
||||||
collector.CleanFileProfileBuffer()
|
collector.CleanFileProfileBuffer()
|
||||||
collector.ParseFileWithClearedPPDirective()
|
collector.ParseFileWithClearedPPDirective()
|
||||||
# collector.PrintFragments()
|
# collector.PrintFragments()
|
||||||
BaseName = os.path.basename(f)
|
BaseName = os.path.basename(f)
|
||||||
DirName = os.path.dirname(FullName)
|
DirName = os.path.dirname(FullName)
|
||||||
Ext = os.path.splitext(f)[1].lstrip('.')
|
Ext = os.path.splitext(f)[1].lstrip('.')
|
||||||
ModifiedTime = os.path.getmtime(FullName)
|
ModifiedTime = os.path.getmtime(FullName)
|
||||||
FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
|
FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
|
||||||
FileObjList.append(FileObj)
|
FileObjList.append(FileObj)
|
||||||
|
if collector:
|
||||||
collector.CleanFileProfileBuffer()
|
collector.CleanFileProfileBuffer()
|
||||||
|
|
||||||
if len(ParseErrorFileList) > 0:
|
if len(ParseErrorFileList) > 0:
|
||||||
|
@ -539,7 +542,8 @@ def CollectSourceCodeDataIntoDB(RootDir):
|
||||||
|
|
||||||
Db = GetDB()
|
Db = GetDB()
|
||||||
for file in FileObjList:
|
for file in FileObjList:
|
||||||
Db.InsertOneFile(file)
|
if file.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:
|
||||||
|
Db.InsertOneFile(file)
|
||||||
|
|
||||||
Db.UpdateIdentifierBelongsToFunction()
|
Db.UpdateIdentifierBelongsToFunction()
|
||||||
|
|
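With this change the parsed file objects for build metadata (INF/DEC/DSC/FDF) are no longer inserted into the identifier database; only real source files are. A tiny illustration of that kind of extension filter, also touching the BinaryExtList option added to the default configuration further down (sample file names invented):

import os

SKIPPED_META_EXT = set(['INF', 'DEC', 'DSC', 'FDF'])          # skipped by the InsertOneFile loop
BINARY_EXT = set(['EXE', 'EFI', 'FV', 'ROM', 'DLL', 'BIN'])   # a few entries from BinaryExtList

def ShouldInsertIntoDb(FileName):
    # Keep only files whose extension is neither build metadata nor a known binary type.
    Ext = os.path.splitext(FileName)[1].lstrip('.').upper()
    return Ext not in SKIPPED_META_EXT and Ext not in BINARY_EXT

Files = ['DxeMain.c', 'MdePkg.dec', 'Shell.efi', 'PiSmmCore.h']
print([f for f in Files if ShouldInsertIntoDb(f)])            # ['DxeMain.c', 'PiSmmCore.h']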
||||||
|
@ -552,7 +556,6 @@ def GetTableID(FullFileName, ErrorMsgList = None):
|
||||||
from File
|
from File
|
||||||
where FullPath like '%s'
|
where FullPath like '%s'
|
||||||
""" % FullFileName
|
""" % FullFileName
|
||||||
|
|
||||||
ResultSet = Db.TblFile.Exec(SqlStatement)
|
ResultSet = Db.TblFile.Exec(SqlStatement)
|
||||||
|
|
||||||
FileID = -1
|
FileID = -1
|
||||||
|
@ -567,6 +570,8 @@ def GetTableID(FullFileName, ErrorMsgList = None):
|
||||||
return FileID
|
return FileID
|
||||||
|
|
||||||
def GetIncludeFileList(FullFileName):
|
def GetIncludeFileList(FullFileName):
|
||||||
|
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
|
||||||
|
return []
|
||||||
IFList = IncludeFileListDict.get(FullFileName)
|
IFList = IncludeFileListDict.get(FullFileName)
|
||||||
if IFList != None:
|
if IFList != None:
|
||||||
return IFList
|
return IFList
|
||||||
|
@ -2301,21 +2306,32 @@ def CheckFileHeaderDoxygenComments(FullFileName):
|
||||||
FileTable = 'Identifier' + str(FileID)
|
FileTable = 'Identifier' + str(FileID)
|
||||||
SqlStatement = """ select Value, ID
|
SqlStatement = """ select Value, ID
|
||||||
from %s
|
from %s
|
||||||
where Model = %d and StartLine = 1 and StartColumn = 0
|
where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0
|
||||||
""" % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
|
""" % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
|
||||||
ResultSet = Db.TblFile.Exec(SqlStatement)
|
ResultSet = Db.TblFile.Exec(SqlStatement)
|
||||||
if len(ResultSet) == 0:
|
if len(ResultSet) == 0:
|
||||||
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)
|
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)
|
||||||
return ErrorMsgList
|
return ErrorMsgList
|
||||||
|
|
||||||
|
IsFoundError1 = True
|
||||||
|
IsFoundError2 = True
|
||||||
|
IsFoundError3 = True
|
||||||
for Result in ResultSet:
|
for Result in ResultSet:
|
||||||
CommentStr = Result[0]
|
CommentStr = Result[0].strip()
|
||||||
if not CommentStr.startswith('/** @file'):
|
ID = Result[1]
|
||||||
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, Result[1])
|
if CommentStr.startswith('/** @file'):
|
||||||
if not CommentStr.endswith('**/'):
|
IsFoundError1 = False
|
||||||
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with **/', FileTable, Result[1])
|
if CommentStr.endswith('**/'):
|
||||||
if CommentStr.find('.') == -1:
|
IsFoundError2 = False
|
||||||
PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', FileTable, Result[1])
|
if CommentStr.find('.') != -1:
|
||||||
|
IsFoundError3 = False
|
||||||
|
|
||||||
|
if IsFoundError1:
|
||||||
|
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
|
||||||
|
if IsFoundError2:
|
||||||
|
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)
|
||||||
|
if IsFoundError3:
|
||||||
|
PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period "".""', FileTable, ID)
|
||||||
|
|
||||||
def CheckFuncHeaderDoxygenComments(FullFileName):
|
def CheckFuncHeaderDoxygenComments(FullFileName):
|
||||||
ErrorMsgList = []
|
ErrorMsgList = []
|
||||||
|
|
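The reworked header check above no longer fails on the first comment it sees: it gathers the comments that may hold the file header (StartLine 1, 7 or 8), and each error is reported only when none of the candidates satisfies the corresponding rule. A condensed sketch of that flag pattern, operating on plain strings instead of Identifier rows:

def CheckFileHeaderComments(Comments):
    # Comments: the candidate header comments found near the top of a file.
    NoFileTag = NoTerminator = NoPeriod = True
    for CommentStr in (C.strip() for C in Comments):
        if CommentStr.startswith('/** @file'):
            NoFileTag = False
        if CommentStr.endswith('**/'):
            NoTerminator = False
        if '.' in CommentStr:
            NoPeriod = False
    Errors = []
    if NoFileTag:
        Errors.append('File header comment should begin with "/** @file"')
    if NoTerminator:
        Errors.append('File header comment should end with "**/"')
    if NoPeriod:
        Errors.append("Comment description should end with period '.'")
    return Errors

print(CheckFileHeaderComments(['/** @file\n  UEFI entry point library.\n**/']))   # []
print(CheckFileHeaderComments(['// some license text']))                          # three errors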
|
@ -21,7 +21,7 @@ Version = 0.1
|
||||||
# Identify to if check all items
|
# Identify to if check all items
|
||||||
# 1 - Check all items and ignore all other detailed items
|
# 1 - Check all items and ignore all other detailed items
|
||||||
# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
|
# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
|
||||||
#
|
#
|
||||||
CheckAll = 0
|
CheckAll = 0
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -68,6 +68,8 @@ GeneralCheckNoProgma = 1
|
||||||
GeneralCheckCarriageReturn = 1
|
GeneralCheckCarriageReturn = 1
|
||||||
# Check whether the file exists
|
# Check whether the file exists
|
||||||
GeneralCheckFileExistence = 1
|
GeneralCheckFileExistence = 1
|
||||||
|
# Check whether file has non ACSII char
|
||||||
|
GeneralCheckNonAcsii = 1
|
||||||
|
|
||||||
#
|
#
|
||||||
# Space Checking
|
# Space Checking
|
||||||
|
@ -81,7 +83,7 @@ PredicateExpressionCheckAll = 0
|
||||||
|
|
||||||
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
|
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
|
||||||
PredicateExpressionCheckBooleanValue = 1
|
PredicateExpressionCheckBooleanValue = 1
|
||||||
# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
|
# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
|
||||||
PredicateExpressionCheckNonBooleanOperator = 1
|
PredicateExpressionCheckNonBooleanOperator = 1
|
||||||
# Check whether a comparison of any pointer to zero must be done via the NULL type
|
# Check whether a comparison of any pointer to zero must be done via the NULL type
|
||||||
PredicateExpressionCheckComparisonNullType = 1
|
PredicateExpressionCheckComparisonNullType = 1
|
||||||
|
@ -189,7 +191,7 @@ DoxygenCheckAll = 0
|
||||||
DoxygenCheckFileHeader = 1
|
DoxygenCheckFileHeader = 1
|
||||||
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
|
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
|
||||||
DoxygenCheckFunctionHeader = 1
|
DoxygenCheckFunctionHeader = 1
|
||||||
# Check whether the first line of text in a comment block is a brief description of the element being documented.
|
# Check whether the first line of text in a comment block is a brief description of the element being documented.
|
||||||
# The brief description must end with a period.
|
# The brief description must end with a period.
|
||||||
DoxygenCheckCommentDescription = 1
|
DoxygenCheckCommentDescription = 1
|
||||||
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
|
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
|
||||||
|
@ -208,8 +210,8 @@ MetaDataFileCheckPathName = 1
|
||||||
MetaDataFileCheckGenerateFileList = 1
|
MetaDataFileCheckGenerateFileList = 1
|
||||||
# The path of log file
|
# The path of log file
|
||||||
MetaDataFileCheckPathOfGenerateFileList = File.log
|
MetaDataFileCheckPathOfGenerateFileList = File.log
|
||||||
# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
|
# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
|
||||||
# Each Library Instance must specify the Supported Module Types in its INF file,
|
# Each Library Instance must specify the Supported Module Types in its INF file,
|
||||||
# and any module specifying the library instance must be one of the supported types.
|
# and any module specifying the library instance must be one of the supported types.
|
||||||
MetaDataFileCheckLibraryInstance = 1
|
MetaDataFileCheckLibraryInstance = 1
|
||||||
# Check whether a Library Instance has been defined for all dependent library classes
|
# Check whether a Library Instance has been defined for all dependent library classes
|
||||||
|
@ -242,3 +244,6 @@ MetaDataFileCheckModuleFileGuidDuplication = 1
|
||||||
# GotoStatementCheckAll = 0
|
# GotoStatementCheckAll = 0
|
||||||
# SpellingCheckAll = 0
|
# SpellingCheckAll = 0
|
||||||
#
|
#
|
||||||
|
|
||||||
|
# A list for binary file ext name
|
||||||
|
BinaryExtList = EXE, EFI, FV, ROM, DLL, COM, BMP, GIF, PYD, CMP, BIN, JPG, UNI, RAW, COM2, LIB, DEPEX, SYS, DB
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
## @file
|
## @file
|
||||||
# Windows makefile for Python tools build.
|
# Linux makefile for Python tools build.
|
||||||
#
|
#
|
||||||
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
||||||
# This program and the accompanying materials
|
# This program and the accompanying materials
|
||||||
|
|
|
@ -751,7 +751,7 @@ class FdfParser:
|
||||||
raise Warning("Value %s is not a number", self.FileName, Line)
|
raise Warning("Value %s is not a number", self.FileName, Line)
|
||||||
|
|
||||||
for Profile in AllMacroList:
|
for Profile in AllMacroList:
|
||||||
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
|
if Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
|
||||||
if Op == None:
|
if Op == None:
|
||||||
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
|
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
|
||||||
return False
|
return False
|
||||||
|
|
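The FdfParser change above drops the Profile.FileName comparison, so a macro is now resolved by name and definition line alone; this is what lets the DSC [Defines] values stored as global defines later in this commit satisfy FDF conditionals. A small sketch of the relaxed lookup (MacroProfile reduced to the three fields used here; the macro name is chosen only for illustration):

class MacroProfile(object):
    def __init__(self, MacroName, MacroValue, DefinedAtLine):
        self.MacroName = MacroName
        self.MacroValue = MacroValue
        self.DefinedAtLine = DefinedAtLine

def FindMacroValue(AllMacroList, Name, UseLine):
    # After the change: any profile with the right name defined at or before the
    # line being evaluated wins, regardless of which file defined it.
    Value = None
    for Profile in AllMacroList:
        if Profile.MacroName == Name and Profile.DefinedAtLine <= UseLine:
            Value = Profile.MacroValue
    return Value

AllMacroList = [MacroProfile('SECURE_BOOT_ENABLE', 'TRUE', 5)]
print(FindMacroValue(AllMacroList, 'SECURE_BOOT_ENABLE', 20))   # TRUE
print(FindMacroValue(AllMacroList, 'SECURE_BOOT_ENABLE', 1))    # None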
|
@ -80,7 +80,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
||||||
#
|
#
|
||||||
|
|
||||||
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
|
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
|
||||||
ErrorCode, ErrorInfo = PathClassObj.Validate()
|
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
|
||||||
if ErrorCode != 0:
|
if ErrorCode != 0:
|
||||||
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
|
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
|
||||||
|
|
||||||
|
@ -343,7 +343,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
||||||
if len(PlatformArchList) == 0:
|
if len(PlatformArchList) == 0:
|
||||||
self.InDsc = False
|
self.InDsc = False
|
||||||
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
|
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
|
||||||
ErrorCode, ErrorInfo = PathClassObj.Validate()
|
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
|
||||||
if ErrorCode != 0:
|
if ErrorCode != 0:
|
||||||
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
|
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
|
||||||
if len(ArchList) == 1:
|
if len(ArchList) == 1:
|
||||||
|
|
|
@ -172,6 +172,7 @@ def main():
|
||||||
|
|
||||||
"""call Workspace build create database"""
|
"""call Workspace build create database"""
|
||||||
os.environ["WORKSPACE"] = Workspace
|
os.environ["WORKSPACE"] = Workspace
|
||||||
|
FdfParser.InputMacroDict["WORKSPACE"] = Workspace
|
||||||
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
|
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
|
||||||
BuildWorkSpace.InitDatabase()
|
BuildWorkSpace.InitDatabase()
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ MODULES=encodings.cp437,encodings.gbk,encodings.utf_16,encodings.utf_8,encodings
|
||||||
BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32
|
BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32
|
||||||
|
|
||||||
|
|
||||||
APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe
|
APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe $(BIN_DIR)\BPDG.exe
|
||||||
|
|
||||||
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
|
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \
|
||||||
|
@ -46,6 +46,7 @@ COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\String.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\String.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \
|
||||||
|
$(BASE_TOOLS_PATH)\Source\Python\Common\VpdInfoFile.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \
|
||||||
|
@ -62,7 +63,7 @@ COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Autogen\GenMake.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Autogen\GenMake.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Autogen\StrGather.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Autogen\StrGather.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Autogen\UniClassObject.py \
|
$(BASE_TOOLS_PATH)\Source\Python\Autogen\UniClassObject.py \
|
||||||
$(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py
|
$(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py
|
||||||
|
|
||||||
|
|
||||||
all: SetPythonPath $(APPLICATIONS)
|
all: SetPythonPath $(APPLICATIONS)
|
||||||
|
@ -100,6 +101,9 @@ $(BIN_DIR)\GenPatchPcdTable.exe: $(BASE_TOOLS_PATH)\Source\Python\GenPatchPcdTab
|
||||||
$(BIN_DIR)\PatchPcdValue.exe: $(BASE_TOOLS_PATH)\Source\Python\PatchPcdValue\PatchPcdValue.py $(COMMON_PYTHON)
|
$(BIN_DIR)\PatchPcdValue.exe: $(BASE_TOOLS_PATH)\Source\Python\PatchPcdValue\PatchPcdValue.py $(COMMON_PYTHON)
|
||||||
@pushd . & @cd PatchPcdValue & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) PatchPcdValue.py & @popd
|
@pushd . & @cd PatchPcdValue & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) PatchPcdValue.py & @popd
|
||||||
|
|
||||||
|
$(BIN_DIR)\BPDG.exe: $(BASE_TOOLS_PATH)\Source\Python\BPDG\BPDG.py $(COMMON_PYTHON)
|
||||||
|
@pushd . & @cd BPDG & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) BPDG.py & @popd
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
cleanall:
|
cleanall:
|
||||||
@del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll
|
@del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
## @file
|
## @file
|
||||||
# This file is used to define each component of the build database
|
# This file is used to define each component of the build database
|
||||||
#
|
#
|
||||||
# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>
|
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
|
||||||
# This program and the accompanying materials
|
# This program and the accompanying materials
|
||||||
# are licensed and made available under the terms and conditions of the BSD License
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
# which accompanies this distribution. The full text of the license may be found at
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
|
@ -31,6 +31,7 @@ from Common.BuildToolError import *
|
||||||
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
|
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
|
||||||
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
|
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
|
||||||
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
|
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
|
||||||
|
# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
|
||||||
#
|
#
|
||||||
# @var TokenCName: To store value for TokenCName
|
# @var TokenCName: To store value for TokenCName
|
||||||
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
|
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
|
||||||
|
@ -43,7 +44,7 @@ from Common.BuildToolError import *
|
||||||
# @var Phase: To store value for Phase, default is "DXE"
|
# @var Phase: To store value for Phase, default is "DXE"
|
||||||
#
|
#
|
||||||
class PcdClassObject(object):
|
class PcdClassObject(object):
|
||||||
def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, GuidValue = None):
|
def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, IsOverrided = False, GuidValue = None):
|
||||||
self.TokenCName = Name
|
self.TokenCName = Name
|
||||||
self.TokenSpaceGuidCName = Guid
|
self.TokenSpaceGuidCName = Guid
|
||||||
self.TokenSpaceGuidValue = GuidValue
|
self.TokenSpaceGuidValue = GuidValue
|
||||||
|
@ -55,7 +56,8 @@ class PcdClassObject(object):
|
||||||
self.SkuInfoList = SkuInfoList
|
self.SkuInfoList = SkuInfoList
|
||||||
self.Phase = "DXE"
|
self.Phase = "DXE"
|
||||||
self.Pending = False
|
self.Pending = False
|
||||||
|
self.IsOverrided = IsOverrided
|
||||||
|
|
||||||
## Convert the class to a string
|
## Convert the class to a string
|
||||||
#
|
#
|
||||||
# Convert each member of the class to string
|
# Convert each member of the class to string
|
||||||
|
@ -73,7 +75,7 @@ class PcdClassObject(object):
|
||||||
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
|
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
|
||||||
for Item in self.SkuInfoList.values():
|
for Item in self.SkuInfoList.values():
|
||||||
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
|
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
|
||||||
Rtn = Rtn + str(self.IsOverrided)
|
Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
|
||||||
|
|
||||||
return Rtn
|
return Rtn
|
||||||
|
|
||||||
|
|
|
@ -82,6 +82,7 @@ class MetaFileParser(object):
|
||||||
self.MetaFile = FilePath
|
self.MetaFile = FilePath
|
||||||
self._FileDir = os.path.dirname(self.MetaFile)
|
self._FileDir = os.path.dirname(self.MetaFile)
|
||||||
self._Macros = copy.copy(Macros)
|
self._Macros = copy.copy(Macros)
|
||||||
|
self._Macros["WORKSPACE"] = os.environ["WORKSPACE"]
|
||||||
|
|
||||||
# for recursive parsing
|
# for recursive parsing
|
||||||
self._Owner = Owner
|
self._Owner = Owner
|
||||||
|
@ -490,7 +491,12 @@ class InfParser(MetaFileParser):
|
||||||
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
|
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
|
||||||
def _PcdParser(self):
|
def _PcdParser(self):
|
||||||
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
|
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
|
||||||
self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
|
ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
|
||||||
|
if len(ValueList) != 2:
|
||||||
|
EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
|
||||||
|
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
|
||||||
|
File=self.MetaFile, Line=self._LineIndex+1)
|
||||||
|
self._ValueList[0:1] = ValueList
|
||||||
if len(TokenList) > 1:
|
if len(TokenList) > 1:
|
||||||
self._ValueList[2] = TokenList[1]
|
self._ValueList[2] = TokenList[1]
|
||||||
if self._ValueList[0] == '' or self._ValueList[1] == '':
|
if self._ValueList[0] == '' or self._ValueList[1] == '':
|
||||||
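_PcdParser now insists that the first token of a PCD line splits into exactly <TokenSpaceGuidCName>.<PcdCName> and raises a FORMAT_INVALID error otherwise. The same validation reduced to a stand-alone function (EdkLogger replaced by a plain exception):

TAB_SPLIT = '.'
TAB_VALUE_SPLIT = '|'

def ParseInfPcdLine(Line):
    # "gTokenSpaceGuid.PcdName|Value" -> (TokenSpaceGuidCName, PcdCName, Value)
    TokenList = Line.split(TAB_VALUE_SPLIT, 1)
    NameList = TokenList[0].strip().split(TAB_SPLIT)
    if len(NameList) != 2:
        raise ValueError('Illegal token space GUID and PCD name format: %s '
                         '(<TokenSpaceGuidCName>.<PcdCName>)' % Line)
    Value = TokenList[1].strip() if len(TokenList) > 1 else ''
    return NameList[0], NameList[1], Value

print(ParseInfPcdLine('gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x0f'))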
|
@ -564,6 +570,7 @@ class DscParser(MetaFileParser):
|
||||||
|
|
||||||
# sections which allow "!include" directive
|
# sections which allow "!include" directive
|
||||||
_IncludeAllowedSection = [
|
_IncludeAllowedSection = [
|
||||||
|
TAB_COMMON_DEFINES.upper(),
|
||||||
TAB_LIBRARIES.upper(),
|
TAB_LIBRARIES.upper(),
|
||||||
TAB_LIBRARY_CLASSES.upper(),
|
TAB_LIBRARY_CLASSES.upper(),
|
||||||
TAB_SKUIDS.upper(),
|
TAB_SKUIDS.upper(),
|
||||||
|
@ -648,7 +655,25 @@ class DscParser(MetaFileParser):
|
||||||
continue
|
continue
|
||||||
# file private macros
|
# file private macros
|
||||||
elif Line.upper().startswith('DEFINE '):
|
elif Line.upper().startswith('DEFINE '):
|
||||||
self._MacroParser()
|
(Name, Value) = self._MacroParser()
|
||||||
|
# Make the defined macro in DSC [Defines] section also
|
||||||
|
# available for FDF file.
|
||||||
|
if self._SectionName == TAB_COMMON_DEFINES.upper():
|
||||||
|
self._LastItem = self._Store(
|
||||||
|
MODEL_META_DATA_GLOBAL_DEFINE,
|
||||||
|
Name,
|
||||||
|
Value,
|
||||||
|
'',
|
||||||
|
'COMMON',
|
||||||
|
'COMMON',
|
||||||
|
self._Owner,
|
||||||
|
self._From,
|
||||||
|
self._LineIndex+1,
|
||||||
|
-1,
|
||||||
|
self._LineIndex+1,
|
||||||
|
-1,
|
||||||
|
self._Enabled
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
elif Line.upper().startswith('EDK_GLOBAL '):
|
elif Line.upper().startswith('EDK_GLOBAL '):
|
||||||
(Name, Value) = self._MacroParser()
|
(Name, Value) = self._MacroParser()
|
||||||
|
@ -715,6 +740,22 @@ class DscParser(MetaFileParser):
|
||||||
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
|
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
|
||||||
TokenList[1] = NormPath(TokenList[1], self._Macros)
|
TokenList[1] = NormPath(TokenList[1], self._Macros)
|
||||||
self._ValueList[0:len(TokenList)] = TokenList
|
self._ValueList[0:len(TokenList)] = TokenList
|
||||||
|
# Treat elements in the [defines] section as global macros for FDF file.
|
||||||
|
self._LastItem = self._Store(
|
||||||
|
MODEL_META_DATA_GLOBAL_DEFINE,
|
||||||
|
TokenList[0],
|
||||||
|
TokenList[1],
|
||||||
|
'',
|
||||||
|
'COMMON',
|
||||||
|
'COMMON',
|
||||||
|
self._Owner,
|
||||||
|
self._From,
|
||||||
|
self._LineIndex+1,
|
||||||
|
-1,
|
||||||
|
self._LineIndex+1,
|
||||||
|
-1,
|
||||||
|
self._Enabled
|
||||||
|
)
|
||||||
|
|
||||||
## <subsection_header> parser
|
## <subsection_header> parser
|
||||||
def _SubsectionHeaderParser(self):
|
def _SubsectionHeaderParser(self):
|
||||||
|
@ -762,7 +803,7 @@ class DscParser(MetaFileParser):
|
||||||
EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
|
EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
|
||||||
ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
|
ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
|
||||||
# the included file must be relative to the parsing file
|
# the included file must be relative to the parsing file
|
||||||
IncludedFile = os.path.join(self._FileDir, self._ValueList[1])
|
IncludedFile = os.path.join(self._FileDir, NormPath(self._ValueList[1], self._Macros))
|
||||||
Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
|
Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
|
||||||
# set the parser status with current status
|
# set the parser status with current status
|
||||||
Parser._SectionName = self._SectionName
|
Parser._SectionName = self._SectionName
|
||||||
|
@ -781,6 +822,7 @@ class DscParser(MetaFileParser):
|
||||||
self._SectionType = Parser._SectionType
|
self._SectionType = Parser._SectionType
|
||||||
self._Scope = Parser._Scope
|
self._Scope = Parser._Scope
|
||||||
self._Enabled = Parser._Enabled
|
self._Enabled = Parser._Enabled
|
||||||
|
self._Macros.update(Parser._Macros)
|
||||||
else:
|
else:
|
||||||
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
|
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
|
||||||
# evaluate the expression
|
# evaluate the expression
|
||||||
|
@ -965,6 +1007,7 @@ class DecParser(MetaFileParser):
|
||||||
#
|
#
|
||||||
def __init__(self, FilePath, FileType, Table, Macro=None):
|
def __init__(self, FilePath, FileType, Table, Macro=None):
|
||||||
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
|
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
|
||||||
|
self._Comments = []
|
||||||
|
|
||||||
## Parser starter
|
## Parser starter
|
||||||
def Start(self):
|
def Start(self):
|
||||||
|
@ -975,27 +1018,34 @@ class DecParser(MetaFileParser):
|
||||||
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
|
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
|
||||||
|
|
||||||
for Index in range(0, len(self._Content)):
|
for Index in range(0, len(self._Content)):
|
||||||
Line = CleanString(self._Content[Index])
|
Line, Comment = CleanString2(self._Content[Index])
|
||||||
|
self._CurrentLine = Line
|
||||||
|
self._LineIndex = Index
|
||||||
|
|
||||||
|
# save comment for later use
|
||||||
|
if Comment:
|
||||||
|
self._Comments.append((Comment, self._LineIndex+1))
|
||||||
# skip empty line
|
# skip empty line
|
||||||
if Line == '':
|
if Line == '':
|
||||||
continue
|
continue
|
||||||
self._CurrentLine = Line
|
|
||||||
self._LineIndex = Index
|
|
||||||
|
|
||||||
# section header
|
# section header
|
||||||
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
|
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
|
||||||
self._SectionHeaderParser()
|
self._SectionHeaderParser()
|
||||||
|
self._Comments = []
|
||||||
continue
|
continue
|
||||||
elif Line.startswith('DEFINE '):
|
elif Line.startswith('DEFINE '):
|
||||||
self._MacroParser()
|
self._MacroParser()
|
||||||
continue
|
continue
|
||||||
elif len(self._SectionType) == 0:
|
elif len(self._SectionType) == 0:
|
||||||
|
self._Comments = []
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# section content
|
# section content
|
||||||
self._ValueList = ['','','']
|
self._ValueList = ['','','']
|
||||||
self._SectionParser[self._SectionType[0]](self)
|
self._SectionParser[self._SectionType[0]](self)
|
||||||
if self._ValueList == None:
|
if self._ValueList == None:
|
||||||
|
self._Comments = []
|
||||||
continue
|
continue
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -1017,6 +1067,22 @@ class DecParser(MetaFileParser):
|
||||||
-1,
|
-1,
|
||||||
0
|
0
|
||||||
)
|
)
|
||||||
|
for Comment, LineNo in self._Comments:
|
||||||
|
self._Store(
|
||||||
|
MODEL_META_DATA_COMMENT,
|
||||||
|
Comment,
|
||||||
|
self._ValueList[0],
|
||||||
|
self._ValueList[1],
|
||||||
|
Arch,
|
||||||
|
ModuleType,
|
||||||
|
self._LastItem,
|
||||||
|
LineNo,
|
||||||
|
-1,
|
||||||
|
LineNo,
|
||||||
|
-1,
|
||||||
|
0
|
||||||
|
)
|
||||||
|
self._Comments = []
|
||||||
self._Done()
|
self._Done()
|
||||||
|
|
||||||
## Section header parser
|
## Section header parser
|
||||||
|
|
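The DEC parser now keeps the comment text of each line (CleanString2 returns code and comment separately), buffers it with its line number, and stores the buffered comments as MODEL_META_DATA_COMMENT records right after the declaration they precede, clearing the buffer at section boundaries. A toy version of that accumulate-and-flush pattern, with a simplified stand-in for CleanString2 and a made-up declaration line:

def CleanString2(Line, CommentCharacter='#'):
    # Simplified stand-in: split a raw line into (code, comment).
    Code, Sep, Comment = Line.partition(CommentCharacter)
    return Code.strip(), (CommentCharacter + Comment).strip() if Sep else ''

Lines = ['# GUID used by the example package',
         'gExamplePkgTokenSpaceGuid = {0x0}',
         '']
Stored = []            # (declaration, [(comment, line_no), ...]) pairs
Comments = []
for Index, Raw in enumerate(Lines):
    Code, Comment = CleanString2(Raw)
    if Comment:
        Comments.append((Comment, Index + 1))
    if Code == '':
        continue
    Stored.append((Code, Comments))
    Comments = []

print(Stored)   # [('gExamplePkgTokenSpaceGuid = {0x0}', [('# GUID used by the example package', 1)])]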
|
@ -18,6 +18,7 @@ import sqlite3
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import pickle
|
import pickle
|
||||||
|
import uuid
|
||||||
|
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
import Common.GlobalData as GlobalData
|
import Common.GlobalData as GlobalData
|
||||||
|
@ -99,6 +100,10 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
|
RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
|
||||||
for Record in RecordList:
|
for Record in RecordList:
|
||||||
GlobalData.gEdkGlobal[Record[0]] = Record[1]
|
GlobalData.gEdkGlobal[Record[0]] = Record[1]
|
||||||
|
|
||||||
|
RecordList = self._RawData[MODEL_META_DATA_GLOBAL_DEFINE, self._Arch]
|
||||||
|
for Record in RecordList:
|
||||||
|
GlobalData.gGlobalDefines[Record[0]] = Record[1]
|
||||||
|
|
||||||
## XXX[key] = value
|
## XXX[key] = value
|
||||||
def __setitem__(self, key, value):
|
def __setitem__(self, key, value):
|
||||||
|
@ -135,6 +140,8 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
self._Pcds = None
|
self._Pcds = None
|
||||||
self._BuildOptions = None
|
self._BuildOptions = None
|
||||||
self._LoadFixAddress = None
|
self._LoadFixAddress = None
|
||||||
|
self._VpdToolGuid = None
|
||||||
|
self._VpdFileName = None
|
||||||
|
|
||||||
## Get architecture
|
## Get architecture
|
||||||
def _GetArch(self):
|
def _GetArch(self):
|
||||||
|
@ -188,6 +195,18 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
self._SkuName = Record[1]
|
self._SkuName = Record[1]
|
||||||
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
|
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
|
||||||
self._LoadFixAddress = Record[1]
|
self._LoadFixAddress = Record[1]
|
||||||
|
elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
|
||||||
|
#
|
||||||
|
# try to convert GUID to a real UUID value to see whether the GUID is format
|
||||||
|
# for VPD_TOOL_GUID is correct.
|
||||||
|
#
|
||||||
|
try:
|
||||||
|
uuid.UUID(Record[1])
|
||||||
|
except:
|
||||||
|
EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
|
||||||
|
self._VpdToolGuid = Record[1]
|
||||||
|
elif Name == TAB_DSC_DEFINES_VPD_FILENAME:
|
||||||
|
self._VpdFileName = Record[1]
|
||||||
# set _Header to non-None in order to avoid database re-querying
|
# set _Header to non-None in order to avoid database re-querying
|
||||||
self._Header = 'DUMMY'
|
self._Header = 'DUMMY'
|
||||||
|
|
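VPD_TOOL_GUID is validated simply by handing the string to uuid.UUID and treating any exception as a malformed value; VPD_FILENAME is stored as-is. The check in isolation (the example GUID string is arbitrary):

import uuid

def IsWellFormedGuid(GuidString):
    # Mirrors the try/except around uuid.UUID() used for VPD_TOOL_GUID above.
    try:
        uuid.UUID(GuidString)
        return True
    except ValueError:
        return False

print(IsWellFormedGuid('8C3D856A-9BE6-468E-850A-24F7A8D38E08'))   # True
print(IsWellFormedGuid('not-a-guid'))                             # False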
||||||
|
@ -267,6 +286,8 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
def _SetSkuName(self, Value):
|
def _SetSkuName(self, Value):
|
||||||
if Value in self.SkuIds:
|
if Value in self.SkuIds:
|
||||||
self._SkuName = Value
|
self._SkuName = Value
|
||||||
|
# Needs to re-retrieve the PCD information
|
||||||
|
self._Pcds = None
|
||||||
|
|
||||||
def _GetFdfFile(self):
|
def _GetFdfFile(self):
|
||||||
if self._FlashDefinition == None:
|
if self._FlashDefinition == None:
|
||||||
|
@ -321,6 +342,24 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
self._LoadFixAddress = ''
|
self._LoadFixAddress = ''
|
||||||
return self._LoadFixAddress
|
return self._LoadFixAddress
|
||||||
|
|
||||||
|
## Retrieve the GUID string for VPD tool
|
||||||
|
def _GetVpdToolGuid(self):
|
||||||
|
if self._VpdToolGuid == None:
|
||||||
|
if self._Header == None:
|
||||||
|
self._GetHeaderInfo()
|
||||||
|
if self._VpdToolGuid == None:
|
||||||
|
self._VpdToolGuid = ''
|
||||||
|
return self._VpdToolGuid
|
||||||
|
|
||||||
|
## Retrieve the VPD file Name, this is optional in DSC file
|
||||||
|
def _GetVpdFileName(self):
|
||||||
|
if self._VpdFileName == None:
|
||||||
|
if self._Header == None:
|
||||||
|
self._GetHeaderInfo()
|
||||||
|
if self._VpdFileName == None:
|
||||||
|
self._VpdFileName = ''
|
||||||
|
return self._VpdFileName
|
||||||
|
|
||||||
## Retrieve [SkuIds] section information
|
## Retrieve [SkuIds] section information
|
||||||
def _GetSkuIds(self):
|
def _GetSkuIds(self):
|
||||||
if self._SkuIds == None:
|
if self._SkuIds == None:
|
||||||
|
@ -418,6 +457,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
'',
|
'',
|
||||||
MaxDatumSize,
|
MaxDatumSize,
|
||||||
{},
|
{},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
|
Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
|
||||||
|
@ -576,6 +616,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
'',
|
'',
|
||||||
MaxDatumSize,
|
MaxDatumSize,
|
||||||
{},
|
{},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
return Pcds
|
return Pcds
|
||||||
|
@ -619,6 +660,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
'',
|
'',
|
||||||
MaxDatumSize,
|
MaxDatumSize,
|
||||||
{self.SkuName : SkuInfo},
|
{self.SkuName : SkuInfo},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
return Pcds
|
return Pcds
|
||||||
|
@ -661,6 +703,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
'',
|
'',
|
||||||
'',
|
'',
|
||||||
{self.SkuName : SkuInfo},
|
{self.SkuName : SkuInfo},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
return Pcds
|
return Pcds
|
||||||
|
@ -686,15 +729,21 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
|
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
|
||||||
# Remove redundant PCD candidates, per the ARCH and SKU
|
# Remove redundant PCD candidates, per the ARCH and SKU
|
||||||
for PcdCName, TokenSpaceGuid in PcdSet:
|
for PcdCName, TokenSpaceGuid in PcdSet:
|
||||||
ValueList = ['', '']
|
ValueList = ['', '', '']
|
||||||
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
|
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
|
||||||
if Setting == None:
|
if Setting == None:
|
||||||
continue
|
continue
|
||||||
TokenList = Setting.split(TAB_VALUE_SPLIT)
|
TokenList = Setting.split(TAB_VALUE_SPLIT)
|
||||||
ValueList[0:len(TokenList)] = TokenList
|
ValueList[0:len(TokenList)] = TokenList
|
||||||
VpdOffset, MaxDatumSize = ValueList
|
#
|
||||||
|
# For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
|
||||||
|
# For the Integer & Boolean type, the optional data can only be InitialValue.
|
||||||
|
# At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype
|
||||||
|
# until the DEC parser has been called.
|
||||||
|
#
|
||||||
|
VpdOffset, MaxDatumSize, InitialValue = ValueList
|
||||||
|
|
||||||
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)
|
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
|
||||||
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
|
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
|
||||||
PcdCName,
|
PcdCName,
|
||||||
TokenSpaceGuid,
|
TokenSpaceGuid,
|
||||||
|
@ -704,6 +753,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
'',
|
'',
|
||||||
MaxDatumSize,
|
MaxDatumSize,
|
||||||
{self.SkuName : SkuInfo},
|
{self.SkuName : SkuInfo},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
return Pcds
|
return Pcds
|
||||||
|
@ -733,7 +783,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
#
|
#
|
||||||
def AddPcd(self, Name, Guid, Value):
|
def AddPcd(self, Name, Guid, Value):
|
||||||
if (Name, Guid) not in self.Pcds:
|
if (Name, Guid) not in self.Pcds:
|
||||||
self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None)
|
self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
|
||||||
self.Pcds[Name, Guid].DefaultValue = Value
|
self.Pcds[Name, Guid].DefaultValue = Value
|
||||||
|
|
||||||
Arch = property(_GetArch, _SetArch)
|
Arch = property(_GetArch, _SetArch)
|
||||||
|
@ -752,7 +802,8 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
BsBaseAddress = property(_GetBsBaseAddress)
|
BsBaseAddress = property(_GetBsBaseAddress)
|
||||||
RtBaseAddress = property(_GetRtBaseAddress)
|
RtBaseAddress = property(_GetRtBaseAddress)
|
||||||
LoadFixAddress = property(_GetLoadFixAddress)
|
LoadFixAddress = property(_GetLoadFixAddress)
|
||||||
|
VpdToolGuid = property(_GetVpdToolGuid)
|
||||||
|
VpdFileName = property(_GetVpdFileName)
|
||||||
SkuIds = property(_GetSkuIds)
|
SkuIds = property(_GetSkuIds)
|
||||||
Modules = property(_GetModules)
|
Modules = property(_GetModules)
|
||||||
LibraryInstances = property(_GetLibraryInstances)
|
LibraryInstances = property(_GetLibraryInstances)
|
||||||
|
@ -760,7 +811,7 @@ class DscBuildData(PlatformBuildClassObject):
|
||||||
Pcds = property(_GetPcds)
|
Pcds = property(_GetPcds)
|
||||||
BuildOptions = property(_GetBuildOptions)
|
BuildOptions = property(_GetBuildOptions)
|
||||||
|
|
||||||
## Platform build information from DSC file
|
## Platform build information from DEC file
|
||||||
#
|
#
|
||||||
# This class is used to retrieve information stored in database and convert them
|
# This class is used to retrieve information stored in database and convert them
|
||||||
# into PackageBuildClassObject form for easier use for AutoGen.
|
# into PackageBuildClassObject form for easier use for AutoGen.
|
||||||
|
@ -789,6 +840,7 @@ class DecBuildData(PackageBuildClassObject):
|
||||||
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
|
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
|
||||||
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
|
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
|
||||||
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
|
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
|
||||||
|
TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -830,6 +882,7 @@ class DecBuildData(PackageBuildClassObject):
|
||||||
self._PackageName = None
|
self._PackageName = None
|
||||||
self._Guid = None
|
self._Guid = None
|
||||||
self._Version = None
|
self._Version = None
|
||||||
|
self._PkgUniFile = None
|
||||||
self._Protocols = None
|
self._Protocols = None
|
||||||
self._Ppis = None
|
self._Ppis = None
|
||||||
self._Guids = None
|
self._Guids = None
|
||||||
|
@ -1063,6 +1116,7 @@ class DecBuildData(PackageBuildClassObject):
|
||||||
TokenNumber,
|
TokenNumber,
|
||||||
'',
|
'',
|
||||||
{},
|
{},
|
||||||
|
False,
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
return Pcds
|
return Pcds
|
||||||
|
@ -1914,6 +1968,7 @@ class InfBuildData(ModuleBuildClassObject):
|
||||||
'',
|
'',
|
||||||
'',
|
'',
|
||||||
{},
|
{},
|
||||||
|
False,
|
||||||
self.Guids[TokenSpaceGuid]
|
self.Guids[TokenSpaceGuid]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -1927,7 +1982,7 @@ class InfBuildData(ModuleBuildClassObject):
|
||||||
# "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
|
# "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
|
||||||
#
|
#
|
||||||
PcdType = self._PCD_TYPE_STRING_[Type]
|
PcdType = self._PCD_TYPE_STRING_[Type]
|
||||||
if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
|
if Type == MODEL_PCD_DYNAMIC:
|
||||||
Pcd.Pending = True
|
Pcd.Pending = True
|
||||||
for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
|
for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
|
||||||
if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:
|
if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:
|
||||||
|
@ -1994,7 +2049,7 @@ class InfBuildData(ModuleBuildClassObject):
|
||||||
|
|
||||||
## Database
|
## Database
|
||||||
#
|
#
|
||||||
# This class defined the build databse for all modules, packages and platform.
|
# This class defined the build database for all modules, packages and platform.
|
||||||
# It will call corresponding parser for the given file if it cannot find it in
|
# It will call corresponding parser for the given file if it cannot find it in
|
||||||
# the database.
|
# the database.
|
||||||
#
|
#
|
||||||
|
|
|
@ -23,6 +23,7 @@ import textwrap
|
||||||
import traceback
|
import traceback
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
import struct
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from StringIO import StringIO
|
from StringIO import StringIO
|
||||||
from Common import EdkLogger
|
from Common import EdkLogger
|
||||||
|
@ -101,6 +102,9 @@ gDriverTypeMap = {
|
||||||
'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
|
'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
|
||||||
}
|
}
|
||||||
|
|
||||||
|
## The look up table of the supported opcode in the dependency expression binaries
|
||||||
|
gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]
|
||||||
|
|
||||||
##
|
##
|
||||||
# Writes a string to the file object.
|
# Writes a string to the file object.
|
||||||
#
|
#
|
||||||
|
@ -162,6 +166,60 @@ def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
|
||||||
IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
|
IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
|
||||||
break
|
break
|
||||||
|
|
||||||
|
##
|
||||||
|
# Parse binary dependency expression section
|
||||||
|
#
|
||||||
|
# This utility class parses the dependency expression section and translate the readable
|
||||||
|
# GUID name and value.
|
||||||
|
#
|
||||||
|
class DepexParser(object):
|
||||||
|
##
|
||||||
|
# Constructor function for class DepexParser
|
||||||
|
#
|
||||||
|
# This constructor function collect GUID values so that the readable
|
||||||
|
# GUID name can be translated.
|
||||||
|
#
|
||||||
|
# @param self The object pointer
|
||||||
|
# @param Wa Workspace context information
|
||||||
|
#
|
||||||
|
def __init__(self, Wa):
|
||||||
|
self._GuidDb = {}
|
||||||
|
for Package in Wa.BuildDatabase.WorkspaceDb.PackageList:
|
||||||
|
for Protocol in Package.Protocols:
|
||||||
|
GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
|
||||||
|
self._GuidDb[GuidValue.upper()] = Protocol
|
||||||
|
for Ppi in Package.Ppis:
|
||||||
|
GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])
|
||||||
|
self._GuidDb[GuidValue.upper()] = Ppi
|
||||||
|
for Guid in Package.Guids:
|
||||||
|
GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
|
||||||
|
self._GuidDb[GuidValue.upper()] = Guid
|
||||||
|
|
||||||
|
##
|
||||||
|
# Parse the binary dependency expression files.
|
||||||
|
#
|
||||||
|
# This function parses the binary dependency expression file and translate it
|
||||||
|
# to the instruction list.
|
||||||
|
#
|
||||||
|
# @param self The object pointer
|
||||||
|
# @param DepexFileName The file name of binary dependency expression file.
|
||||||
|
#
|
||||||
|
def ParseDepexFile(self, DepexFileName):
|
||||||
|
DepexFile = open(DepexFileName, "rb")
|
||||||
|
DepexStatement = []
|
||||||
|
OpCode = DepexFile.read(1)
|
||||||
|
while OpCode:
|
||||||
|
Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
|
||||||
|
if Statement in ["BEFORE", "AFTER", "PUSH"]:
|
||||||
|
GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
|
||||||
|
struct.unpack("LHHBBBBBBBB", DepexFile.read(16))
|
||||||
|
GuidString = self._GuidDb.get(GuidValue, GuidValue)
|
||||||
|
Statement = "%s %s" % (Statement, GuidString)
|
||||||
|
DepexStatement.append(Statement)
|
||||||
|
OpCode = DepexFile.read(1)
|
||||||
|
|
||||||
|
return DepexStatement
|
||||||
|
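The new DepexParser walks the flattened .depex binary one opcode byte at a time; BEFORE, AFTER and PUSH are followed by a 16-byte GUID operand that is unpacked and, when possible, translated back to a protocol/PPI/GUID name. A self-contained sketch of that decoding loop (explicit little-endian struct format used here; the GUID bytes in the demo are invented):

import io
import struct

gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]

def ParseDepex(Stream, GuidDb=None):
    GuidDb = GuidDb or {}
    Statements = []
    OpCode = Stream.read(1)
    while OpCode:
        Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
        if Statement in ("BEFORE", "AFTER", "PUSH"):
            # Data1/Data2/Data3 are little-endian integers, Data4 is 8 raw bytes.
            Fields = struct.unpack("<IHH8B", Stream.read(16))
            GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % Fields
            Statement = "%s %s" % (Statement, GuidDb.get(GuidValue, GuidValue))
        Statements.append(Statement)
        OpCode = Stream.read(1)
    return Statements

# PUSH <guid>, END
Demo = struct.pack("B", 2) + struct.pack("<IHH8B", 0x12345678, 0x9ABC, 0xDEF0, 0, 1, 2, 3, 4, 5, 6, 7) + struct.pack("B", 8)
print(ParseDepex(io.BytesIO(Demo)))   # ['PUSH 12345678-9ABC-DEF0-0001-020304050607', 'END']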
|
||||||
##
|
##
|
||||||
# Reports library information
|
# Reports library information
|
||||||
#
|
#
|
||||||
|
@ -254,6 +312,7 @@ class DepexReport(object):
|
||||||
#
|
#
|
||||||
def __init__(self, M):
|
def __init__(self, M):
|
||||||
self.Depex = ""
|
self.Depex = ""
|
||||||
|
self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
|
||||||
ModuleType = M.ModuleType
|
ModuleType = M.ModuleType
|
||||||
if not ModuleType:
|
if not ModuleType:
|
||||||
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
|
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
|
||||||
|
@ -289,14 +348,25 @@ class DepexReport(object):
|
||||||
#
|
#
|
||||||
# This function generates report for the module dependency expression.
|
# This function generates report for the module dependency expression.
|
||||||
#
|
#
|
||||||
# @param self The object pointer
|
# @param self The object pointer
|
||||||
# @param File The file object for report
|
# @param File The file object for report
|
||||||
|
# @param GlobalDepexParser The platform global Dependency expression parser object
|
||||||
#
|
#
|
||||||
def GenerateReport(self, File):
|
def GenerateReport(self, File, GlobalDepexParser):
|
||||||
if not self.Depex:
|
if not self.Depex:
|
||||||
return
|
return
|
||||||
|
|
||||||
FileWrite(File, gSubSectionStart)
|
FileWrite(File, gSubSectionStart)
|
||||||
|
if os.path.isfile(self._DepexFileName):
|
||||||
|
try:
|
||||||
|
DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)
|
||||||
|
FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")
|
||||||
|
for DepexStatement in DepexStatements:
|
||||||
|
FileWrite(File, " %s" % DepexStatement)
|
||||||
|
FileWrite(File, gSubSectionSep)
|
||||||
|
except:
|
||||||
|
EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
|
||||||
|
|
||||||
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
|
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
|
||||||
|
|
||||||
if self.Source == "INF":
|
if self.Source == "INF":
|
||||||
|
@ -453,12 +523,14 @@ class ModuleReport(object):
|
||||||
# This function generates report for separate module expression
|
# This function generates report for separate module expression
|
||||||
# in a platform build.
|
# in a platform build.
|
||||||
#
|
#
|
||||||
# @param self The object pointer
|
# @param self The object pointer
|
||||||
# @param File The file object for report
|
# @param File The file object for report
|
||||||
# @param GlobalPcdReport The platform global PCD class object
|
# @param GlobalPcdReport The platform global PCD report object
|
||||||
# @param ReportType The kind of report items in the final report file
|
# @param GlobalPredictionReport The platform global Prediction report object
|
||||||
|
# @param GlobalDepexParser The platform global Dependency expression parser object
|
||||||
|
# @param ReportType The kind of report items in the final report file
|
||||||
#
|
#
|
||||||
def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, ReportType):
|
def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):
|
||||||
FileWrite(File, gSectionStart)
|
FileWrite(File, gSectionStart)
|
||||||
|
|
||||||
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")
|
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")
|
||||||
|
@ -505,7 +577,7 @@ class ModuleReport(object):
|
||||||
self.LibraryReport.GenerateReport(File)
|
self.LibraryReport.GenerateReport(File)
|
||||||
|
|
||||||
if "DEPEX" in ReportType:
|
if "DEPEX" in ReportType:
|
||||||
self.DepexReport.GenerateReport(File)
|
self.DepexReport.GenerateReport(File, GlobalDepexParser)
|
||||||
|
|
||||||
if "BUILD_FLAGS" in ReportType:
|
if "BUILD_FLAGS" in ReportType:
|
||||||
self.BuildFlagsReport.GenerateReport(File)
|
self.BuildFlagsReport.GenerateReport(File)
|
||||||
|
@ -1325,6 +1397,10 @@ class PlatformReport(object):
|
||||||
if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
|
if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
|
||||||
self.PredictionReport = PredictionReport(Wa)
|
self.PredictionReport = PredictionReport(Wa)
|
||||||
|
|
||||||
|
self.DepexParser = None
|
||||||
|
if "DEPEX" in ReportType:
|
||||||
|
self.DepexParser = DepexParser(Wa)
|
||||||
|
|
||||||
self.ModuleReportList = []
|
self.ModuleReportList = []
|
||||||
if MaList != None:
|
if MaList != None:
|
||||||
self._IsModuleBuild = True
|
self._IsModuleBuild = True
|
||||||
|
@ -1371,7 +1447,7 @@ class PlatformReport(object):
|
||||||
FdReportListItem.GenerateReport(File)
|
FdReportListItem.GenerateReport(File)
|
||||||
|
|
||||||
for ModuleReportItem in self.ModuleReportList:
|
for ModuleReportItem in self.ModuleReportList:
|
||||||
ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, ReportType)
|
ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)
|
||||||
|
|
||||||
if not self._IsModuleBuild:
|
if not self._IsModuleBuild:
|
||||||
if "EXECUTION_ORDER" in ReportType:
|
if "EXECUTION_ORDER" in ReportType:
|
||||||
|
|
|
@ -23,6 +23,7 @@ import glob
|
||||||
import time
|
import time
|
||||||
import platform
|
import platform
|
||||||
import traceback
|
import traceback
|
||||||
|
import encodings.ascii
|
||||||
|
|
||||||
from struct import *
|
from struct import *
|
||||||
from threading import *
|
from threading import *
|
||||||
|
@ -735,7 +736,7 @@ class Build():
|
||||||
self.LoadFixAddress = 0
|
self.LoadFixAddress = 0
|
||||||
self.UniFlag = UniFlag
|
self.UniFlag = UniFlag
|
||||||
|
|
||||||
# print dot charater during doing some time-consuming work
|
# print dot character during doing some time-consuming work
|
||||||
self.Progress = Utils.Progressor()
|
self.Progress = Utils.Progressor()
|
||||||
|
|
||||||
# parse target.txt, tools_def.txt, and platform file
|
# parse target.txt, tools_def.txt, and platform file
|
||||||
|
@ -1267,9 +1268,9 @@ class Build():
|
||||||
if len (SmmModuleList) > 0:
|
if len (SmmModuleList) > 0:
|
||||||
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))
|
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))
|
||||||
|
|
||||||
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
|
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
|
||||||
BtBaseAddr = TopMemoryAddress - RtSize
|
BtBaseAddr = TopMemoryAddress - RtSize
|
||||||
RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
|
RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
|
||||||
|
|
||||||
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
|
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
|
||||||
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
|
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
|
||||||
|
|