mirror of https://github.com/acidanthera/audk.git
BaseTools: Decouple AutoGen Objects
BZ: https://bugzilla.tianocore.org/show_bug.cgi?id=1875 1. Separate the AutoGen.py into 3 small py files. One is for AutoGen base class, one is for WorkspaceAutoGen class and PlatformAutoGen class, and the one for ModuleAutoGen class. 2. Create a new class DataPipe to store the Platform scope settings. Create a new class PlatformInfo to provide the same interface as PlatformAutoGen. PlatformInfo class is initialized by DataPipe instance. Create a new class WorkspaceInfo to provide the same interface as WorkspaceAutoGen. WorkspaceInfo class is initialized by DataPipe instance. 3. Change ModuleAutoGen to depends on DataPipe, PlatformInfo and WorkspaceInfo. Remove the dependency of ModuleAutoGen to PlatformAutoGen. Cc: Liming Gao <liming.gao@intel.com> Cc: Steven Shi <steven.shi@intel.com> Signed-off-by: Bob Feng <bob.c.feng@intel.com> Acked-by: Laszlo Ersek <lersek@redhat.com> Tested-by: Laszlo Ersek <lersek@redhat.com> Acked-by: Liming Gao <liming.gao@intel.com>
This commit is contained in:
parent
197ca7febf
commit
e8449e1d8e
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,147 @@
|
|||
## @file
|
||||
# Create makefile for MS nmake and GNU make
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
from __future__ import absolute_import
|
||||
from Workspace.WorkspaceDatabase import BuildDB
|
||||
from Workspace.WorkspaceCommon import GetModuleLibInstances
|
||||
import Common.GlobalData as GlobalData
|
||||
import os
|
||||
import pickle
|
||||
from pickle import HIGHEST_PROTOCOL
|
||||
|
||||
class PCD_DATA():
    ## Pickle-friendly value object holding one PCD's platform-scope settings.
    #
    #  Captures exactly the attributes of a full PCD object that must survive
    #  the trip through the DataPipe to a worker process
    #  (see MemoryDataPipe.FillData).
    def __init__(self,TokenCName,TokenSpaceGuidCName,Type,DatumType,SkuInfoList,DefaultValue,
                 MaxDatumSize,UserDefinedDefaultStoresFlag,validateranges,
                 validlists,expressions,CustomAttribute,TokenValue):
        # Every constructor argument is stored verbatim under a same-named attribute.
        (self.TokenCName, self.TokenSpaceGuidCName, self.Type, self.DatumType,
         self.SkuInfoList, self.DefaultValue, self.MaxDatumSize,
         self.UserDefinedDefaultStoresFlag, self.validateranges,
         self.validlists, self.expressions, self.CustomAttribute,
         self.TokenValue) = (
            TokenCName, TokenSpaceGuidCName, Type, DatumType,
            SkuInfoList, DefaultValue, MaxDatumSize,
            UserDefinedDefaultStoresFlag, validateranges,
            validlists, expressions, CustomAttribute,
            TokenValue)
||||
|
||||
class DataPipe(object):
    ## Base container carrying platform-scope build settings between processes.
    #
    #  Holds a plain dict so the whole payload stays picklable.
    def __init__(self, BuildDir=None):
        self.data_container = {}
        self.BuildDir = BuildDir


class MemoryDataPipe(DataPipe):
    ## In-memory DataPipe with pickle-based persistence.

    def Get(self, key):
        # Returns None when the key was never filled in.
        return self.data_container.get(key)

    def dump(self, file_path):
        # Persist the whole container using the most compact protocol available.
        with open(file_path, 'wb') as fd:
            pickle.dump(self.data_container, fd, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path):
        with open(file_path, 'rb') as fd:
            self.data_container = pickle.load(fd)

    @property
    def DataContainer(self):
        return self.data_container

    @DataContainer.setter
    def DataContainer(self, data):
        # NOTE: assignment MERGES into the existing dict rather than replacing it.
        self.data_container.update(data)

    def FillData(self, PlatformInfo):
        ## Snapshot everything a worker-side PlatformInfo/WorkSpaceInfo needs.

        def _snapshot(pcd):
            # Reduce a full PCD object to its picklable PCD_DATA subset.
            return PCD_DATA(
                pcd.TokenCName, pcd.TokenSpaceGuidCName, pcd.Type,
                pcd.DatumType, pcd.SkuInfoList, pcd.DefaultValue,
                pcd.MaxDatumSize, pcd.UserDefinedDefaultStoresFlag, pcd.validateranges,
                pcd.validlists, pcd.expressions, pcd.CustomAttribute, pcd.TokenValue)

        # Platform-scope PCDs.
        self.DataContainer = {
            "PLA_PCD": [_snapshot(pcd) for pcd in PlatformInfo.Platform.Pcds.values()]
            }

        # Per-module PCD overrides from the DSC [Components] section.
        ModulePcds = {}
        for m in PlatformInfo.Platform.Modules:
            m_pcds = PlatformInfo.Platform.Modules[m].Pcds
            if m_pcds:
                ModulePcds[(m.File, m.Root)] = [_snapshot(pcd) for pcd in m_pcds.values()]
        self.DataContainer = {"MOL_PCDS": ModulePcds}

        # Each module's resolved library-instance list.
        ModuleLibs = {}
        for m in PlatformInfo.Platform.Modules:
            module_obj = BuildDB.BuildObject[m, PlatformInfo.Arch, PlatformInfo.BuildTarget, PlatformInfo.ToolChain]
            Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject,
                                         PlatformInfo.Arch, PlatformInfo.BuildTarget, PlatformInfo.ToolChain)
            ModuleLibs[(m.File, m.Root, module_obj.Arch)] = [(l.MetaFile.File, l.MetaFile.Root, l.Arch) for l in Libs]
        self.DataContainer = {"DEPS": ModuleLibs}

        # Platform/module build options and the tool definition.
        platform_build_opt = PlatformInfo.EdkIIBuildOption
        ToolDefinition = PlatformInfo.ToolDefinition
        module_build_opt = {}
        for m in PlatformInfo.Platform.Modules:
            ModuleTypeOptions, PlatformModuleOptions = PlatformInfo.GetGlobalBuildOptions(
                BuildDB.BuildObject[m, PlatformInfo.Arch, PlatformInfo.BuildTarget, PlatformInfo.ToolChain])
            if ModuleTypeOptions or PlatformModuleOptions:
                module_build_opt[(m.File, m.Root)] = {"ModuleTypeOptions": ModuleTypeOptions,
                                                      "PlatformModuleOptions": PlatformModuleOptions}
        self.DataContainer = {"PLA_BO": platform_build_opt,
                              "TOOLDEF": ToolDefinition,
                              "MOL_BO": module_build_opt
                              }

        # Workspace / platform identification used to rebuild the worker context.
        PInfo = {
            "WorkspaceDir": PlatformInfo.Workspace.WorkspaceDir,
            "Target": PlatformInfo.BuildTarget,
            "ToolChain": PlatformInfo.Workspace.ToolChain,
            "BuildRuleFile": PlatformInfo.BuildRule,
            "Arch": PlatformInfo.Arch,
            "ArchList": PlatformInfo.Workspace.ArchList,
            "ActivePlatform": PlatformInfo.MetaFile
            }
        self.DataContainer = {'P_Info': PInfo}

        self.DataContainer = {'M_Name': PlatformInfo.UniqueBaseName}

        self.DataContainer = {"ToolChainFamily": PlatformInfo.ToolChainFamily}

        self.DataContainer = {"BuildRuleFamily": PlatformInfo.BuildRuleFamily}

        self.DataContainer = {"MixedPcd": GlobalData.MixedPcd}

        self.DataContainer = {"BuildOptPcd": GlobalData.BuildOptionPcd}

        self.DataContainer = {"BuildCommand": PlatformInfo.BuildCommand}

        self.DataContainer = {"AsBuildModuleList": PlatformInfo._AsBuildModuleList}

        self.DataContainer = {"G_defines": GlobalData.gGlobalDefines}

        self.DataContainer = {"CL_defines": GlobalData.gCommandLineDefines}

        # Workers inherit the parent's environment verbatim.
        self.DataContainer = {"Env_Var": dict(os.environ)}

        self.DataContainer = {"PackageList": [(dec.MetaFile, dec.Arch) for dec in PlatformInfo.PackageList]}

        self.DataContainer = {"GuidDict": PlatformInfo.Platform._GuidDict}

        # Only a boolean is shipped: whether an FDF file was parsed at all.
        self.DataContainer = {"FdfParser": True if GlobalData.gFdfParser else False}
||||
|
|
@ -1629,7 +1629,7 @@ def CreatePcdCode(Info, AutoGenC, AutoGenH):
|
|||
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
|
||||
TokenSpaceList.append(Pcd.TokenSpaceGuidCName)
|
||||
|
||||
SkuMgr = Info.Workspace.Platform.SkuIdMgr
|
||||
SkuMgr = Info.PlatformInfo.Platform.SkuIdMgr
|
||||
AutoGenH.Append("\n// Definition of SkuId Array\n")
|
||||
AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")
|
||||
# Add extern declarations to AutoGen.h if one or more Token Space GUIDs were found
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,619 @@
|
|||
## @file
|
||||
# Create makefile for MS nmake and GNU make
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
from __future__ import absolute_import
|
||||
from Workspace.WorkspaceDatabase import WorkspaceDatabase,BuildDB
|
||||
from Common.caching import cached_property
|
||||
from AutoGen.BuildEngine import BuildRule,AutoGenReqBuildRuleVerNum
|
||||
from AutoGen.AutoGen import CalculatePriorityValue
|
||||
from Common.Misc import CheckPcdDatum,GuidValue
|
||||
from Common.Expression import ValueExpressionEx
|
||||
from Common.DataType import *
|
||||
from CommonDataClass.Exceptions import *
|
||||
from CommonDataClass.CommonClass import SkuInfoClass
|
||||
import Common.EdkLogger as EdkLogger
|
||||
from Common.BuildToolError import OPTION_CONFLICT,FORMAT_INVALID,RESOURCE_NOT_AVAILABLE
|
||||
from Common.MultipleWorkspace import MultipleWorkspace as mws
|
||||
from collections import defaultdict
|
||||
from Common.Misc import PathClass
|
||||
import os
|
||||
|
||||
|
||||
#
# The priority list while override build option
#
# Each key is a 5-digit mask "0xTCAKF": a 1 marks a field given explicitly in
# the build-option key (Target, ToolChain, Arch, CommandType, Attribute) and a
# 0 marks a wildcard (*).  More specific keys map to higher priority values;
# consumed by CalculatePriorityValue (imported from AutoGen.AutoGen).
#
PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
            "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
            "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
            "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
            "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
|
||||
## Base class for AutoGen
#
# This class just implements the cache mechanism of AutoGen objects.
#
class AutoGenInfo(object):
    # database to maintain the objects in each child class
    __ObjectCache = {}    # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object

    ## Factory method
    #
    #   @param  Class           class object of real AutoGen class
    #                           (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
    #   @param  Workspace       Workspace directory or WorkspaceAutoGen object
    #   @param  MetaFile        The path of meta file
    #   @param  Target          Build target
    #   @param  Toolchain       Tool chain name
    #   @param  Arch            Target arch
    #   @param  *args           The specific class related parameters
    #   @param  **kwargs        The specific class related dict parameters
    #
    @classmethod
    def GetCache(cls):
        # Expose the (name-mangled) shared cache for external inspection/reset.
        return cls.__ObjectCache
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if the object has been created
        Key = (Target, Toolchain, Arch, MetaFile)
        if Key in cls.__ObjectCache:
            # if it exists, just return it directly
            return cls.__ObjectCache[Key]
        # it didn't exist. create it, cache it, then return it
        RetVal = cls.__ObjectCache[Key] = super(AutoGenInfo, cls).__new__(cls)
        return RetVal


    ## hash() operator
    #
    #  The file path of platform file will be used to represent hash value of this object
    #
    #   @retval int     Hash value of the file path of platform file
    #
    def __hash__(self):
        return hash(self.MetaFile)

    ## str() operator
    #
    #  The file path of platform file will be used to represent this object
    #
    #   @retval string  String of platform file path
    #
    def __str__(self):
        return str(self.MetaFile)

    ## "==" operator
    #  NOTE: compares the meta file against the other object directly, so an
    #  AutoGenInfo also compares equal to a bare meta-file path.
    def __eq__(self, Other):
        return Other and self.MetaFile == Other

    ## Expand * in build option key
    #
    #   @param  Options     Options to be expanded
    #   @param  ToolDef     Use specified ToolDef instead of full version.
    #                       This is needed during initialization to prevent
    #                       infinite recursion between BuildOptions,
    #                       ToolDefinition, and this function.
    #
    #   @retval options     Options expanded
    #
    def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
        if not ToolDef:
            ToolDef = self.ToolDefinition
        BuildOptions = {}
        FamilyMatch  = False
        FamilyIsNull = True

        OverrideList = {}
        #
        # Construct a list contain the build options which need override.
        #
        for Key in Options:
            #
            # Key[0] -- tool family
            # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            #
            if (Key[0] == self.BuildRuleFamily and
                (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
                Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
                # Only '='-prefixed (replacing) options participate in the
                # priority-based de-duplication below.
                if (Target == self.BuildTarget or Target == TAB_STAR) and\
                    (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
                    (Arch == self.Arch or Arch == TAB_STAR) and\
                    Options[Key].startswith("="):

                    if OverrideList.get(Key[1]) is not None:
                        OverrideList.pop(Key[1])
                    OverrideList[Key[1]] = Options[Key]

        #
        # Use the highest priority value.
        #
        if (len(OverrideList) >= 2):
            KeyList = list(OverrideList.keys())
            for Index in range(len(KeyList)):
                NowKey = KeyList[Index]
                Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
                for Index1 in range(len(KeyList) - Index - 1):
                    NextKey = KeyList[Index1 + Index + 1]
                    #
                    # Compare two Key, if one is included by another, choose the higher priority one
                    #
                    Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
                    if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
                        (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
                        (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
                        (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
                        (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):

                        # Drop the lower-priority key from Options entirely.
                        if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
                            if Options.get((self.BuildRuleFamily, NextKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NextKey))
                        else:
                            if Options.get((self.BuildRuleFamily, NowKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NowKey))

        # First pass: options whose family matches the tool's BUILDRULEFAMILY
        # (or FAMILY when no BUILDRULEFAMILY is defined).
        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Tool in ToolDef and Family != "":
                FamilyIsNull = False
                if ToolDef[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
                    if Family != ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                        continue
                elif Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                    continue
                FamilyMatch = True
            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]
        # Build Option Family has been checked, which needn't be checked again for family.
        if FamilyMatch or FamilyIsNull:
            return BuildOptions

        # Second pass (only reached when a family was given but nothing matched
        # BUILDRULEFAMILY): retry matching against FAMILY alone.
        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Tool not in ToolDef or Family == "":
                continue
            # option has been added before
            if Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                continue

            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]
        return BuildOptions
||||
#
# This class is the pruned WorkSpaceAutoGen for ModuleAutoGen in multiple thread
#
class WorkSpaceInfo(AutoGenInfo):
    ## Worker-process stand-in exposing the same attributes ModuleAutoGen
    #  reads from a real WorkspaceAutoGen.
    def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch):
        # No source scan has happened on this side yet.
        self._SrcTimeStamp = 0
        # Shared build database handle and its object accessor.
        self.Db = BuildDB
        self.BuildDatabase = BuildDB.BuildObject
        self.Target, self.ToolChain, self.ArchList = Target, ToolChain, Arch
        self.WorkspaceDir = Workspace
        self.ActivePlatform = MetaFile
||||
|
||||
|
||||
class PlatformInfo(AutoGenInfo):
    ## Pruned PlatformAutoGen substitute used by ModuleAutoGen on worker
    #  processes; platform-scope data is served out of a DataPipe instead of
    #  being recomputed.
    def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch, DataPipe):
        self.Wa = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self.MetaFile = MetaFile
        self.Arch = Arch
        # The build target is kept under both names for interface
        # compatibility with PlatformAutoGen.
        self.Target = self.BuildTarget = Target
        self.ToolChain = ToolChain
        # Platform build object for this (dsc, arch, target, toolchain) tuple.
        self.Platform = Workspace.BuildDatabase[MetaFile, Arch, Target, ToolChain]

        self.SourceDir = MetaFile.SubDir
        self.DataPipe = DataPipe
||||
@cached_property
|
||||
def _AsBuildModuleList(self):
|
||||
retVal = self.DataPipe.Get("AsBuildModuleList")
|
||||
if retVal is None:
|
||||
retVal = {}
|
||||
return retVal
|
||||
|
||||
## Test if a module is supported by the platform
|
||||
#
|
||||
# An error will be raised directly if the module or its arch is not supported
|
||||
# by the platform or current configuration
|
||||
#
|
||||
def ValidModule(self, Module):
|
||||
return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
|
||||
or Module in self._AsBuildModuleList
|
||||
|
||||
@cached_property
|
||||
def ToolChainFamily(self):
|
||||
retVal = self.DataPipe.Get("ToolChainFamily")
|
||||
if retVal is None:
|
||||
retVal = {}
|
||||
return retVal
|
||||
|
||||
@cached_property
|
||||
def BuildRuleFamily(self):
|
||||
retVal = self.DataPipe.Get("BuildRuleFamily")
|
||||
if retVal is None:
|
||||
retVal = {}
|
||||
return retVal
|
||||
|
||||
@cached_property
|
||||
def _MbList(self):
|
||||
return [self.Wa.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain] for m in self.Platform.Modules]
|
||||
|
||||
@cached_property
|
||||
def PackageList(self):
|
||||
RetVal = set()
|
||||
for dec_file,Arch in self.DataPipe.Get("PackageList"):
|
||||
RetVal.add(self.Wa.BuildDatabase[dec_file,Arch,self.BuildTarget, self.ToolChain])
|
||||
return list(RetVal)
|
||||
|
||||
## Return the directory to store all intermediate and final files built
|
||||
@cached_property
|
||||
def BuildDir(self):
|
||||
if os.path.isabs(self.OutputDir):
|
||||
RetVal = os.path.join(
|
||||
os.path.abspath(self.OutputDir),
|
||||
self.Target + "_" + self.ToolChain,
|
||||
)
|
||||
else:
|
||||
RetVal = os.path.join(
|
||||
self.WorkspaceDir,
|
||||
self.OutputDir,
|
||||
self.Target + "_" + self.ToolChain,
|
||||
)
|
||||
return RetVal
|
||||
|
||||
## Return the build output directory platform specifies
|
||||
@cached_property
|
||||
def OutputDir(self):
|
||||
return self.Platform.OutputDirectory
|
||||
|
||||
## Return platform name
|
||||
@cached_property
|
||||
def Name(self):
|
||||
return self.Platform.PlatformName
|
||||
|
||||
## Return meta-file GUID
|
||||
@cached_property
|
||||
def Guid(self):
|
||||
return self.Platform.Guid
|
||||
|
||||
## Return platform version
|
||||
@cached_property
|
||||
def Version(self):
|
||||
return self.Platform.Version
|
||||
|
||||
## Return paths of tools
|
||||
@cached_property
|
||||
def ToolDefinition(self):
|
||||
retVal = self.DataPipe.Get("TOOLDEF")
|
||||
if retVal is None:
|
||||
retVal = {}
|
||||
return retVal
|
||||
|
||||
## Return build command string
|
||||
#
|
||||
# @retval string Build command string
|
||||
#
|
||||
@cached_property
|
||||
def BuildCommand(self):
|
||||
retVal = self.DataPipe.Get("BuildCommand")
|
||||
if retVal is None:
|
||||
retVal = []
|
||||
return retVal
|
||||
|
||||
@cached_property
|
||||
def PcdTokenNumber(self):
|
||||
retVal = self.DataPipe.Get("PCD_TNUM")
|
||||
if retVal is None:
|
||||
retVal = {}
|
||||
return retVal
|
||||
|
||||
    ## Override PCD setting (type, value, ...)
    #
    #   @param  ToPcd       The PCD to be overridden
    #   @param  FromPcd     The PCD overriding from (may be None: then only the
    #                       MaxDatumSize/default-SKU fall-backs below apply)
    #   @param  Module      Module context, used only in error messages
    #   @param  Msg         Description of FromPcd's origin, for error messages
    #   @param  Library     Library context, used only in error messages
    #
    def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
        #
        # in case there's PCDs coming from FDF file, which have no type given.
        # at this point, ToPcd.Type has the type found from dependent
        # package
        #
        # Report a mixed PCD under its original (unsuffixed) token name.
        TokenCName = ToPcd.TokenCName
        for PcdItem in self.MixedPcd:
            if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in self.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if FromPcd is not None:
            if ToPcd.Pending and FromPcd.Type:
                ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type\
                and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
                # Narrow DynamicEx to the more specific subtype from the platform.
                if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                    ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type \
                and ToPcd.Type != FromPcd.Type:
                # Incompatible types: fatal build error.
                if Library:
                    Module = str(Module) + " 's library file (" + str(Library) + ")"
                EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                                ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                          % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                             ToPcd.Type, Module, FromPcd.Type, Msg),
                                File=self.MetaFile)

            # Copy over every value the platform actually sets.
            if FromPcd.MaxDatumSize:
                ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
                ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
            if FromPcd.DefaultValue:
                ToPcd.DefaultValue = FromPcd.DefaultValue
            if FromPcd.TokenValue:
                ToPcd.TokenValue = FromPcd.TokenValue
            if FromPcd.DatumType:
                ToPcd.DatumType = FromPcd.DatumType
            if FromPcd.SkuInfoList:
                ToPcd.SkuInfoList = FromPcd.SkuInfoList
            if FromPcd.UserDefinedDefaultStoresFlag:
                ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
            # Add Flexible PCD format parse
            if ToPcd.DefaultValue:
                try:
                    ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                    File=self.MetaFile)

            # check the validation of datum
            IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
            if not IsValid:
                EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                                ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
            ToPcd.validateranges = FromPcd.validateranges
            ToPcd.validlists = FromPcd.validlists
            ToPcd.expressions = FromPcd.expressions
            ToPcd.CustomAttribute = FromPcd.CustomAttribute

        # Derive MaxDatumSize from the default value for VOID* PCDs without one.
        if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
            EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                            % (ToPcd.TokenSpaceGuidCName, TokenCName))
            Value = ToPcd.DefaultValue
            if not Value:
                ToPcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                # L"..." unicode string: 2 bytes per character, quotes excluded.
                ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                # {0x..,0x..} byte array: one byte per comma-separated element.
                ToPcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                ToPcd.MaxDatumSize = str(len(Value) - 1)

        # apply default SKU for dynamic PCDS if specified one is not available
        if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
            and not ToPcd.SkuInfoList:
            if self.Platform.SkuName in self.Platform.SkuIds:
                SkuName = self.Platform.SkuName
            else:
                SkuName = TAB_DEFAULT
            ToPcd.SkuInfoList = {
                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
            }
||||
|
||||
    ## Apply platform- and module-scope PCD overrides to a module's PCD set.
    #
    #   @param  Module      the module (meta file) whose PCDs are being resolved
    #   @param  Pcds        dict keyed by (TokenCName, TokenSpaceGuidCName)
    #   @param  Library     optional library context for error messages
    #
    #   @retval list        the final PCD objects (values of Pcds)
    #
    def ApplyPcdSetting(self, Module, Pcds, Library=""):
        # for each PCD in module
        for Name, Guid in Pcds:
            PcdInModule = Pcds[Name, Guid]
            # find out the PCD setting in platform
            if (Name, Guid) in self.Pcds:
                PcdInPlatform = self.Pcds[Name, Guid]
            else:
                PcdInPlatform = None
            # then override the settings if any
            self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
            # resolve the VariableGuid value
            for SkuId in PcdInModule.SkuInfoList:
                Sku = PcdInModule.SkuInfoList[SkuId]
                if Sku.VariableGuid == '': continue
                Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
                if Sku.VariableGuidValue is None:
                    PackageList = "\n\t".join(str(P) for P in self.PackageList)
                    EdkLogger.error(
                                'build',
                                RESOURCE_NOT_AVAILABLE,
                                "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                                ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                                        % (Guid, Name, str(Module)),
                                File=self.MetaFile
                                )

        # override PCD settings with module specific setting
        if Module in self.Platform.Modules:
            PlatformModule = self.Platform.Modules[str(Module)]
            for Key in PlatformModule.Pcds:
                if self.BuildOptionPcd:
                    # A command-line --pcd value takes precedence over the DSC
                    # [Components] override.
                    for pcd in self.BuildOptionPcd:
                        (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
                        if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
                            PlatformModule.Pcds[Key].DefaultValue = pcdvalue
                            PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
                            break
                Flag = False
                if Key in Pcds:
                    ToPcd = Pcds[Key]
                    Flag = True
                elif Key in self.MixedPcd:
                    # The module may know this PCD under one of its mixed aliases.
                    for PcdItem in self.MixedPcd[Key]:
                        if PcdItem in Pcds:
                            ToPcd = Pcds[PcdItem]
                            Flag = True
                            break
                if Flag:
                    self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
        # use PCD value to calculate the MaxDatumSize when it is not specified
        for Name, Guid in Pcds:
            Pcd = Pcds[Name, Guid]
            if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
                Pcd.MaxSizeUserSet = None
                Value = Pcd.DefaultValue
                if not Value:
                    Pcd.MaxDatumSize = '1'
                elif Value[0] == 'L':
                    # L"..." unicode string: 2 bytes per character, quotes excluded.
                    Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
                elif Value[0] == '{':
                    # {0x..,0x..} byte array: one byte per comma-separated element.
                    Pcd.MaxDatumSize = str(len(Value.split(',')))
                else:
                    Pcd.MaxDatumSize = str(len(Value) - 1)
        return list(Pcds.values())
||||
|
||||
@cached_property
|
||||
def Pcds(self):
|
||||
PlatformPcdData = self.DataPipe.Get("PLA_PCD")
|
||||
# for pcd in PlatformPcdData:
|
||||
# for skuid in pcd.SkuInfoList:
|
||||
# pcd.SkuInfoList[skuid] = self.CreateSkuInfoFromDict(pcd.SkuInfoList[skuid])
|
||||
return {(pcddata.TokenCName,pcddata.TokenSpaceGuidCName):pcddata for pcddata in PlatformPcdData}
|
||||
|
||||
def CreateSkuInfoFromDict(self,SkuInfoDict):
|
||||
return SkuInfoClass(
|
||||
SkuInfoDict.get("SkuIdName"),
|
||||
SkuInfoDict.get("SkuId"),
|
||||
SkuInfoDict.get("VariableName"),
|
||||
SkuInfoDict.get("VariableGuid"),
|
||||
SkuInfoDict.get("VariableOffset"),
|
||||
SkuInfoDict.get("HiiDefaultValue"),
|
||||
SkuInfoDict.get("VpdOffset"),
|
||||
SkuInfoDict.get("DefaultValue"),
|
||||
SkuInfoDict.get("VariableGuidValue"),
|
||||
SkuInfoDict.get("VariableAttribute",""),
|
||||
SkuInfoDict.get("DefaultStore",None)
|
||||
)
|
||||
@cached_property
|
||||
def MixedPcd(self):
|
||||
return self.DataPipe.Get("MixedPcd")
|
||||
@cached_property
|
||||
def _GuidDict(self):
|
||||
RetVal = self.DataPipe.Get("GuidDict")
|
||||
if RetVal is None:
|
||||
RetVal = {}
|
||||
return RetVal
|
||||
@cached_property
|
||||
def BuildOptionPcd(self):
|
||||
return self.DataPipe.Get("BuildOptPcd")
|
||||
    ## Merge build options from every scope for one module.
    #
    #   Merge order (later dicts override, or for FLAGS append onto, earlier
    #   ones): tools_def -> module INF -> platform DSC -> module-type defaults
    #   -> DSC [Components] module overrides.
    #
    #   @param  module  the module build object to resolve options for
    #   @retval tuple   (merged BuildOptions dict, BuildRuleOrder or None)
    #
    def ApplyBuildOption(self,module):
        PlatformOptions = self.DataPipe.Get("PLA_BO")
        ModuleBuildOptions = self.DataPipe.Get("MOL_BO")
        ModuleOptionFromDsc = ModuleBuildOptions.get((module.MetaFile.File,module.MetaFile.Root))
        if ModuleOptionFromDsc:
            ModuleTypeOptions, PlatformModuleOptions = ModuleOptionFromDsc["ModuleTypeOptions"],ModuleOptionFromDsc["PlatformModuleOptions"]
        else:
            ModuleTypeOptions, PlatformModuleOptions = {}, {}
        ToolDefinition = self.DataPipe.Get("TOOLDEF")
        ModuleOptions = self._ExpandBuildOption(module.BuildOptions)
        BuildRuleOrder = None
        # The last scope that defines BUILDRULEORDER wins.
        for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
            for Tool in Options:
                for Attr in Options[Tool]:
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        BuildRuleOrder = Options[Tool][Attr]

        AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
                       list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
                       list(ToolDefinition.keys()))
        BuildOptions = defaultdict(lambda: defaultdict(str))
        for Tool in AllTools:
            for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
                if Tool not in Options:
                    continue
                for Attr in Options[Tool]:
                    #
                    # Do not generate it in Makefile
                    #
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        continue
                    Value = Options[Tool][Attr]
                    # check if override is indicated: a leading '=' replaces
                    # everything accumulated so far instead of appending.
                    if Value.startswith('='):
                        BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])
                    else:
                        if Attr != 'PATH':
                            BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)
                        else:
                            # PATH is always a single value, never appended.
                            BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)

        return BuildOptions, BuildRuleOrder
||||
|
||||
    ## Resolve a module's library-instance list from the pipe's "DEPS" table.
    #
    #   @param  module  module build object whose libraries are requested
    #   @retval list    library build objects from the build database
    #
    def ApplyLibraryInstance(self,module):
        alldeps = self.DataPipe.Get("DEPS")
        if alldeps is None:
            alldeps = {}
        # NOTE(review): this lookup uses a 4-tuple key (File, Root, Arch, Path)
        # and expects 4-tuple values, while MemoryDataPipe.FillData in this same
        # change stores "DEPS" under 3-tuple keys with 3-tuple values -- confirm
        # the producer and consumer agree, otherwise mod_libs is always [].
        mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch,module.MetaFile.Path),[])
        retVal = []
        for (file_path,root,arch,abs_path) in mod_libs:
            libMetaFile = PathClass(file_path,root)
            # Keep the original (workspace-relative) path alongside the absolute one.
            libMetaFile.OriginalPath = PathClass(file_path,root)
            libMetaFile.Path = abs_path
            retVal.append(self.Wa.BuildDatabase[libMetaFile, arch, self.Target,self.ToolChain])
        return retVal
||||
|
||||
    ## Parse build_rule.txt in Conf Directory.
    #
    #   @retval     BuildRule object
    #
    @cached_property
    def BuildRule(self):
        # The already-parsed BuildRule object travels through the pipe inside
        # the "P_Info" entry (stored by MemoryDataPipe.FillData).
        WInfo = self.DataPipe.Get("P_Info")
        RetVal = WInfo.get("BuildRuleFile")
        if RetVal._FileVersion == "":
            # Fall back to the minimum version AutoGen requires when the rule
            # file declares none.
            RetVal._FileVersion = AutoGenReqBuildRuleVerNum
        return RetVal
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,904 @@
|
|||
## @file
|
||||
# Build information for the whole workspace (WorkspaceAutoGen class)
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
|
||||
## Import Modules
|
||||
#
|
||||
from __future__ import print_function
|
||||
from __future__ import absolute_import
|
||||
import os.path as path
|
||||
import hashlib
|
||||
from collections import defaultdict
|
||||
from GenFds.FdfParser import FdfParser
|
||||
from Workspace.WorkspaceCommon import GetModuleLibInstances
|
||||
from AutoGen import GenMake
|
||||
from AutoGen.AutoGen import AutoGen
|
||||
from AutoGen.PlatformAutoGen import PlatformAutoGen
|
||||
from AutoGen.BuildEngine import gDefaultBuildRuleFile
|
||||
from Common.ToolDefClassObject import gDefaultToolsDefFile
|
||||
from Common.StringUtils import NormPath
|
||||
from Common.BuildToolError import *
|
||||
from Common.DataType import *
|
||||
from Common.Misc import *
|
||||
|
||||
## Regular expression for splitting Dependency Expression string into tokens.
#  Raw string avoids invalid-escape DeprecationWarning for \( \) \w \S.
gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")

## Regular expression for match: PCD(xxxx.yyy)
gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")
|
||||
|
||||
## Workspace AutoGen class
|
||||
#
|
||||
# This class is used mainly to control the whole platform build for different
|
||||
# architecture. This class will generate top level makefile.
|
||||
#
|
||||
class WorkspaceAutoGen(AutoGen):
|
||||
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
    """Construct (or re-enter) the workspace AutoGen object.

    AutoGen instances are cached by key, so __init__ can run more than once
    on the same object; the _Init guard makes the expensive _InitWorker run
    only on first construction.
    """
    if not hasattr(self, "_Init"):
        self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
        self._Init = True
|
||||
|
||||
## Initialize WorkspaceAutoGen
#
#   @param  WorkspaceDir            Root directory of workspace
#   @param  ActivePlatform          Meta-file of active platform
#   @param  Target                  Build target
#   @param  Toolchain               Tool chain name
#   @param  ArchList                List of architecture of current build
#   @param  MetaFileDb              Database containing meta-files
#   @param  BuildConfig             Configuration of build
#   @param  ToolDefinition          Tool chain definitions
#   @param  FlashDefinitionFile     File of flash definition
#   @param  Fds                     FD list to be generated
#   @param  Fvs                     FV list to be generated
#   @param  Caps                    Capsule list to be generated
#   @param  SkuId                   SKU id from command line
#   @param  UniFlag                 UNI string flag from command line
#   @param  Progress                Optional progress indicator object
#   @param  BuildModule             Optional single module being built
#
def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
          BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
          Progress=None, BuildModule=None):
    self.BuildDatabase  = MetaFileDb
    self.MetaFile       = ActivePlatform
    self.WorkspaceDir   = WorkspaceDir
    # Platform description parsed for the COMMON arch; per-arch views are
    # fetched from BuildDatabase later as needed.
    self.Platform       = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]
    GlobalData.gActivePlatform = self.Platform
    self.BuildTarget    = Target
    self.ToolChain      = Toolchain
    self.ArchList       = ArchList
    self.SkuId          = SkuId
    self.UniFlag        = UniFlag

    self.TargetTxt      = BuildConfig
    self.ToolDef        = ToolDefinition
    self.FdfFile        = FlashDefinitionFile
    self.FdTargetList   = Fds if Fds else []
    self.FvTargetList   = Fvs if Fvs else []
    self.CapTargetList  = Caps if Caps else []
    self.AutoGenObjectList = []
    self._GuidDict      = {}

    # There are many relative directory operations below, so anchor the
    # process CWD at the workspace root.
    os.chdir(self.WorkspaceDir)

    self.MergeArch()
    self.ValidateBuildTarget()

    EdkLogger.info("")
    if self.ArchList:
        EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
    EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
    EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))

    EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
    if BuildModule:
        EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))

    if self.FdfFile:
        EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))

    EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)

    if Progress:
        Progress.Start("\nProcessing meta-data")
    #
    # Mark now build in AutoGen Phase
    #
    GlobalData.gAutoGenPhase = True
    # NOTE: the following calls are order-sensitive — PCD type resolution and
    # mixed-PCD renaming must happen before PCD collection and verification.
    self.ProcessModuleFromPdf()
    self.ProcessPcdType()
    self.ProcessMixedPcd()
    self.VerifyPcdsFromFDF()
    self.CollectAllPcds()
    self.GeneratePkgLevelHash()
    #
    # Check PCDs token value conflict in each DEC file.
    #
    self._CheckAllPcdsTokenValueConflict()
    #
    # Check PCD type and definition between DSC and DEC
    #
    self._CheckPcdDefineAndType()

    self.CreateBuildOptionsFile()
    self.CreatePcdTokenNumberFile()
    self.CreateModuleHashInfo()
    GlobalData.gAutoGenPhase = False
|
||||
|
||||
#
# Merge Arch
#
def MergeArch(self):
    """Intersect the requested architectures with those the platform
    supports and store the result (as a tuple) back into self.ArchList.

    Raises a build error when the intersection is empty; logs a verbose
    note for any requested arch the platform does not support.
    """
    supported = set(self.Platform.SupArchList)
    if not self.ArchList:
        merged = supported
    else:
        merged = set(self.ArchList) & supported
    if not merged:
        EdkLogger.error("build", PARAMETER_INVALID,
                        ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))
    elif self.ArchList and len(merged) != len(self.ArchList):
        dropped = set(self.ArchList).symmetric_difference(supported)
        EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
                          % (" ".join(dropped), " ".join(self.Platform.SupArchList)))
    self.ArchList = tuple(merged)
|
||||
|
||||
# Validate build target
def ValidateBuildTarget(self):
    """Fail the build when self.BuildTarget is not one of the targets
    declared by the active platform."""
    valid_targets = self.Platform.BuildTargets
    if self.BuildTarget in valid_targets:
        return
    EdkLogger.error("build", PARAMETER_INVALID,
                    ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
                              % (self.BuildTarget, " ".join(valid_targets)))
|
||||
@cached_property
def FdfProfile(self):
    """Parse the flash definition (FDF) file, if any, and return its profile.

    Side effects: may set self.FdfFile from the platform's FlashDefinition,
    publishes the parser via GlobalData.gFdfParser, and clears the FD/FV/
    Capsule target lists when no FDF file exists.

    @retval Profile object from FdfParser, or None when there is no FDF file.
    """
    if not self.FdfFile:
        self.FdfFile = self.Platform.FlashDefinition

    FdfProfile = None
    if self.FdfFile:
        Fdf = FdfParser(self.FdfFile.Path)
        Fdf.ParseFile()
        GlobalData.gFdfParser = Fdf
        if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
            FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                # BUGFIX: was "is 'FILE'" — 'is' tests object identity, and
                # identity of string literals is implementation-dependent.
                # Use equality to reliably detect FILE regions.
                if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
                    # VPD region must be 8-byte aligned for the VPD tool.
                    if int(FdRegion.Offset) % 8 != 0:
                        EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
        FdfProfile = Fdf.Profile
    else:
        if self.FdTargetList:
            EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))
            self.FdTargetList = []
        if self.FvTargetList:
            EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))
            self.FvTargetList = []
        if self.CapTargetList:
            EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
            self.CapTargetList = []

    return FdfProfile
|
||||
|
||||
def ProcessModuleFromPdf(self):
    """Cross-check modules referenced by the FDF against the DSC.

    Verifies every FV named on the command line exists in the FDF, and that
    every INF the FDF references is either listed in the DSC or is a binary
    module. No-op when there is no FDF profile.
    """
    if self.FdfProfile:
        for fvname in self.FvTargetList:
            if fvname.upper() not in self.FdfProfile.FvDict:
                EdkLogger.error("build", OPTION_VALUE_INVALID,
                                "No such an FV in FDF file: %s" % fvname)

        # In DSC file may use FILE_GUID to override the module, then in the Platform.Modules use FILE_GUIDmodule.inf as key,
        # but the path (self.MetaFile.Path) is the real path
        for key in self.FdfProfile.InfDict:
            if key == 'ArchTBD':
                # INF has no fixed arch: accept it if ANY build arch's DSC lists it.
                MetaFile_cache = defaultdict(set)
                for Arch in self.ArchList:
                    Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                    for Pkey in Current_Platform_cache.Modules:
                        MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)
                for Inf in self.FdfProfile.InfDict[key]:
                    # NOTE(review): 'Arch' here is the loop variable left over
                    # from the cache-building loop above (the last build arch)
                    # — looks intentional upstream but verify.
                    ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                    for Arch in self.ArchList:
                        if ModuleFile in MetaFile_cache[Arch]:
                            break
                    else:
                        # Not in any DSC: only a binary module is acceptable.
                        ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                        if not ModuleData.IsBinaryModule:
                            EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)

            else:
                # Arch-specific INF section: check only the matching build arch.
                for Arch in self.ArchList:
                    if Arch == key:
                        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                        MetaFileList = set()
                        for Pkey in Platform.Modules:
                            MetaFileList.add(Platform.Modules[Pkey].MetaFile)
                        for Inf in self.FdfProfile.InfDict[key]:
                            ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                            if ModuleFile in MetaFileList:
                                continue
                            ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                            if not ModuleData.IsBinaryModule:
                                EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
|
||||
|
||||
|
||||
|
||||
# parse FDF file to get PCDs in it, if any
def VerifyPcdsFromFDF(self):
    """Validate that every PCD referenced in the FDF file is properly
    declared; does nothing when no FDF profile exists."""
    profile = self.FdfProfile
    if profile:
        self.VerifyPcdDeclearation(profile.PcdDict)
|
||||
|
||||
def ProcessPcdType(self):
    """Resolve the type of every 'Pending' module PCD from the platform.

    A PCD declared without an access method in a module INF stays 'Pending'
    until the DSC (platform-level, module-scope override, or — for library
    PCDs — a referencing module's override) supplies a concrete type.
    """
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        # Bare attribute access forces evaluation of the (lazy) Pcds property
        # so platform PCD data is populated before the loops below.
        Platform.Pcds
        # generate the SourcePcdDict and BinaryPcdDict
        Libs = []
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf' and str(BuildData) in Platform.Modules :
                # Collect every library instance linked by a DSC-listed module.
                Libs.extend(GetModuleLibInstances(BuildData, Platform,
                                 self.BuildDatabase,
                                 Arch,
                                 self.BuildTarget,
                                 self.ToolChain
                                 ))
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf':
                for key in BuildData.Pcds:
                    if BuildData.Pcds[key].Pending:
                        # 1) Platform-level PCD section supplies the type.
                        if key in Platform.Pcds:
                            PcdInPlatform = Platform.Pcds[key]
                            if PcdInPlatform.Type:
                                BuildData.Pcds[key].Type = PcdInPlatform.Type
                                BuildData.Pcds[key].Pending = False

                        # 2) Module-scope <Pcds*> override in the DSC.
                        if BuildData.MetaFile in Platform.Modules:
                            PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
                            if key in PlatformModule.Pcds:
                                PcdInPlatform = PlatformModule.Pcds[key]
                                if PcdInPlatform.Type:
                                    BuildData.Pcds[key].Type = PcdInPlatform.Type
                                    BuildData.Pcds[key].Pending = False
                        else:
                            #Pcd used in Library, Pcd Type from reference module if Pcd Type is Pending
                            if BuildData.Pcds[key].Pending:
                                if bool(BuildData.LibraryClass):
                                    if BuildData in set(Libs):
                                        # 3) Inherit the type from the first
                                        # referencing module that overrides it.
                                        ReferenceModules = BuildData.ReferenceModules
                                        for ReferenceModule in ReferenceModules:
                                            if ReferenceModule.MetaFile in Platform.Modules:
                                                RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]
                                                if key in RefPlatformModule.Pcds:
                                                    PcdInReferenceModule = RefPlatformModule.Pcds[key]
                                                    if PcdInReferenceModule.Type:
                                                        BuildData.Pcds[key].Type = PcdInReferenceModule.Type
                                                        BuildData.Pcds[key].Pending = False
                                                        break
|
||||
|
||||
def ProcessMixedPcd(self):
    """Detect PCDs used with more than one access method ("mixed" PCDs).

    Source modules may use only one access method per PCD (error otherwise).
    For binary modules, and for source/binary combinations, the PCD is
    duplicated under suffixed names (<Name>_<Type>) recorded in
    GlobalData.MixedPcd, and the platform/FDF PCD tables are rewritten to
    the suffixed names.

    BUGFIX: the original code deleted keys from BuildData.Pcds and from the
    FDF PcdSet while iterating those same dicts, which raises RuntimeError
    (or silently corrupts iteration) on Python 3; iteration now runs over a
    list() snapshot of the keys.
    """
    for Arch in self.ArchList:
        SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}
        BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}
        SourcePcdDict_Keys = SourcePcdDict.keys()
        BinaryPcdDict_Keys = BinaryPcdDict.keys()

        # generate the SourcePcdDict and BinaryPcdDict
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf':
                for key in BuildData.Pcds:
                    if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:
                        if BuildData.IsBinaryModule:
                            BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                        else:
                            SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))

                    elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:
                        if BuildData.MetaFile.Ext == '.inf':
                            if BuildData.IsBinaryModule:
                                BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                            else:
                                SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))

                    elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:
                        SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                    elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:
                        SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))

        #
        # A PCD can only use one type for all source modules
        #
        for i in SourcePcdDict_Keys:
            for j in SourcePcdDict_Keys:
                if i != j:
                    Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])
                    if len(Intersections) > 0:
                        EdkLogger.error(
                        'build',
                        FORMAT_INVALID,
                        "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),
                        ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)
                        )

        #
        # intersection the BinaryPCD for Mixed PCD
        #
        for i in BinaryPcdDict_Keys:
            for j in BinaryPcdDict_Keys:
                if i != j:
                    Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])
                    for item in Intersections:
                        NewPcd1 = (item[0] + '_' + i, item[1])
                        NewPcd2 = (item[0] + '_' + j, item[1])
                        if item not in GlobalData.MixedPcd:
                            GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
                        else:
                            if NewPcd1 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd1)
                            if NewPcd2 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd2)

        #
        # intersection the SourcePCD and BinaryPCD for Mixed PCD
        #
        for i in SourcePcdDict_Keys:
            for j in BinaryPcdDict_Keys:
                if i != j:
                    Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])
                    for item in Intersections:
                        NewPcd1 = (item[0] + '_' + i, item[1])
                        NewPcd2 = (item[0] + '_' + j, item[1])
                        if item not in GlobalData.MixedPcd:
                            GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
                        else:
                            if NewPcd1 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd1)
                            if NewPcd2 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd2)

        BuildData = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        # Snapshot the keys: the loop body deletes/re-adds entries in
        # BuildData.Pcds, which must not happen while iterating the dict.
        for key in list(BuildData.Pcds):
            for SinglePcd in GlobalData.MixedPcd:
                if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
                    for item in GlobalData.MixedPcd[SinglePcd]:
                        Pcd_Type = item[0].split('_')[-1]
                        if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \
                           (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):
                            Value = BuildData.Pcds[key]
                            Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type
                            if len(key) == 2:
                                newkey = (Value.TokenCName, key[1])
                            elif len(key) == 3:
                                newkey = (Value.TokenCName, key[1], key[2])
                            del BuildData.Pcds[key]
                            BuildData.Pcds[newkey] = Value
                            break
                    break

    if self.FdfProfile:
        PcdSet = self.FdfProfile.PcdDict
        # handle the mixed pcd in FDF file — iterate a snapshot since the
        # body deletes the original key and inserts the suffixed ones.
        for key in list(PcdSet):
            if key in GlobalData.MixedPcd:
                Value = PcdSet[key]
                del PcdSet[key]
                for item in GlobalData.MixedPcd[key]:
                    PcdSet[item] = Value
|
||||
|
||||
#Collect package set information from INF of FDF
@cached_property
def PkgSet(self):
    """Per-arch list of every package used by DSC modules plus any extra
    modules that only the FDF references.

    Side effect: fills in self.FdfFile from the platform's FlashDefinition
    when no FDF file was given on the command line.
    """
    if not self.FdfFile:
        self.FdfFile = self.Platform.FlashDefinition

    fdf_inf_list = self.FdfProfile.InfList if self.FdfFile else []
    result = {}
    for Arch in self.ArchList:
        platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        packages = set()
        # Packages pulled in by every module the DSC lists.
        for module_key in platform.Modules:
            module_data = self.BuildDatabase[module_key, Arch, self.BuildTarget, self.ToolChain]
            packages.update(module_data.Packages)
        # Packages pulled in by FDF-only modules.
        for Inf in fdf_inf_list:
            module_file = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
            if module_file in platform.Modules:
                continue
            module_data = self.BuildDatabase[module_file, Arch, self.BuildTarget, self.ToolChain]
            packages.update(module_data.Packages)
        result[Arch] = list(packages)
    return result
|
||||
|
||||
def VerifyPcdDeclearation(self,PcdSet):
    """Check each (Name, Guid, Fields) PCD from the FDF against the DEC
    declarations of all packages used for each build arch.

    Errors out when a PCD is undeclared, or when it is declared only as
    Dynamic/DynamicEx (those types may not be set from an FDF file).
    """
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        Pkgs = self.PkgSet[Arch]
        DecPcds = set()
        DecPcdsKey = set()
        for Pkg in Pkgs:
            # Pcd keys are (TokenCName, TokenSpaceGuidCName, Type) tuples.
            for Pcd in Pkg.Pcds:
                DecPcds.add((Pcd[0], Pcd[1]))
                DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))

        Platform.SkuName = self.SkuId
        for Name, Guid,Fileds in PcdSet:
            if (Name, Guid) not in DecPcds:
                EdkLogger.error(
                    'build',
                    PARSER_ERROR,
                    "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
                    File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                    Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                )
            else:
                # Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.
                if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
                    or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
                    or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
                    continue
                elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
                    EdkLogger.error(
                        'build',
                        PARSER_ERROR,
                        "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
                        File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                        Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                    )
|
||||
def CollectAllPcds(self):
    """Create the per-arch PlatformAutoGen objects, collect their dynamic
    and fixed-at-build PCDs, then publish the computed PCD token numbers
    into every module's data pipe.
    """
    for Arch in self.ArchList:
        Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
        #
        # Explicitly collect platform's dynamic PCDs
        #
        Pa.CollectPlatformDynamicPcds()
        Pa.CollectFixedAtBuildPcds()
        self.AutoGenObjectList.append(Pa)
    # We need to calculate the PcdTokenNumber after all Arch Pcds are collected.
    for Arch in self.ArchList:
        #Pcd TokenNumber
        # NOTE(review): constructing PlatformAutoGen again presumably returns
        # the cached per-arch instance created above — confirm the caching.
        Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
        self.UpdateModuleDataPipe(Arch,  {"PCD_TNUM":Pa.PcdTokenNumber})
|
||||
|
||||
def UpdateModuleDataPipe(self, arch, attr_dict):
    """Push *attr_dict* into the DataPipe of every cached AutoGen object
    built for *arch*; objects without a usable DataPipe are skipped."""
    cache = AutoGen.Cache()
    for cache_key in cache:
        # Cache keys are (Target, Toolchain, Arch, MetaFile) tuples.
        if cache_key[2] != arch:
            continue
        try:
            cache[cache_key].DataPipe.DataContainer = attr_dict
        except Exception:
            # Best effort: not every cached object carries a DataPipe.
            pass
|
||||
#
# Generate Package level hash value
#
def GeneratePkgLevelHash(self):
    """For each build arch, reset the package-hash table and (when hash
    caching is enabled) hash every package used by that arch."""
    for Arch in self.ArchList:
        # Fresh table per arch; _GenPkgLevelHash fills it in.
        GlobalData.gPackageHash = {}
        if not GlobalData.gUseHashCache:
            continue
        for Pkg in self.PkgSet[Arch]:
            self._GenPkgLevelHash(Pkg)
|
||||
|
||||
|
||||
def CreateBuildOptionsFile(self):
    #
    # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.
    #
    # Each record ends with a line break; the FDF record is emitted only
    # when a flash definition file is in use.
    parts = [
        'gCommandLineDefines: ' + str(GlobalData.gCommandLineDefines),
        'BuildOptionPcd: ' + str(GlobalData.BuildOptionPcd),
        'Active Platform: ' + str(self.Platform),
    ]
    if self.FdfFile:
        parts.append('Flash Image Definition: ' + str(self.FdfFile))
    content = TAB_LINE_BREAK.join(parts) + TAB_LINE_BREAK
    SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
|
||||
|
||||
def CreatePcdTokenNumberFile(self):
    #
    # Create PcdToken Number file for Dynamic/DynamicEx Pcd.
    #
    # Token numbers are identical across arches, so the first platform
    # AutoGen object is representative.
    records = ['PcdTokenNumber: ']
    Pa = self.AutoGenObjectList[0]
    if Pa.PcdTokenNumber and Pa.DynamicPcdList:
        for Pcd in Pa.DynamicPcdList:
            token = Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
            records.append('%s : %s' % (str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName)), str(token)))
    SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), TAB_LINE_BREAK.join(records), False)
|
||||
|
||||
def CreateModuleHashInfo(self):
    """Record meta-file freshness and (optionally) the platform hash.

    Side effects:
      - self._SrcTimeStamp gets the newest modification time among all
        workspace meta files.
      - When hash caching is on, GlobalData.gPlatformHash is set and
        <BuildDir>/AutoGen.hash is written.
      - The meta-file list is written to <BuildDir>/AutoGen.

    @retval True on completion.
    """
    #
    # Get set of workspace metafiles
    #
    AllWorkSpaceMetaFiles = self._GetMetaFiles(self.BuildTarget, self.ToolChain)

    #
    # Retrieve latest modified time of all metafiles (stat each file once,
    # not twice as the original code did).
    #
    SrcTimeStamp = 0
    for f in AllWorkSpaceMetaFiles:
        MTime = os.stat(f)[8]
        if MTime > SrcTimeStamp:
            SrcTimeStamp = MTime
    self._SrcTimeStamp = SrcTimeStamp

    if GlobalData.gUseHashCache:
        m = hashlib.md5()
        # Sort the files: a set has no stable iteration order, which made
        # the platform hash non-deterministic across runs.
        for files in sorted(AllWorkSpaceMetaFiles):
            if files.endswith('.dec'):
                continue
            with open(files, 'rb') as f:
                m.update(f.read())
        SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)
        GlobalData.gPlatformHash = m.hexdigest()

    #
    # Write metafile list to build directory
    #
    AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')
    if os.path.exists(AutoGenFilePath):
        os.remove(AutoGenFilePath)
    if not os.path.exists(self.BuildDir):
        os.makedirs(self.BuildDir)
    # 'MetaFileListFd' avoids shadowing the builtin name 'file'.
    with open(AutoGenFilePath, 'w+') as MetaFileListFd:
        for f in AllWorkSpaceMetaFiles:
            print(f, file=MetaFileListFd)
    return True
|
||||
|
||||
def _GenPkgLevelHash(self, Pkg):
    """Compute the MD5 hash of one package (.dec file plus every file under
    its include directories), persist it to <PkgDir>/<PackageName>.hash and
    cache it in GlobalData.gPackageHash.

    Skips packages already hashed in this run.
    """
    if Pkg.PackageName in GlobalData.gPackageHash:
        return

    PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)
    CreateDirectory(PkgDir)
    HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
    m = hashlib.md5()
    # Get .dec file's hash value (context manager: no leaked handle on error).
    with open(Pkg.MetaFile.Path, 'rb') as f:
        m.update(f.read())
    # Get include files hash value
    if Pkg.Includes:
        for inc in sorted(Pkg.Includes, key=lambda x: str(x)):
            for Root, Dirs, Files in os.walk(str(inc)):
                # Sort subdirectories in place so the walk order — and hence
                # the hash — is deterministic across runs and filesystems.
                Dirs.sort()
                for File in sorted(Files):
                    File_Path = os.path.join(Root, File)
                    with open(File_Path, 'rb') as f:
                        m.update(f.read())
    SaveFileOnChange(HashFile, m.hexdigest(), False)
    GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()
|
||||
|
||||
def _GetMetaFiles(self, Target, Toolchain):
|
||||
AllWorkSpaceMetaFiles = set()
|
||||
#
|
||||
# add fdf
|
||||
#
|
||||
if self.FdfFile:
|
||||
AllWorkSpaceMetaFiles.add (self.FdfFile.Path)
|
||||
for f in GlobalData.gFdfParser.GetAllIncludedFile():
|
||||
AllWorkSpaceMetaFiles.add (f.FileName)
|
||||
#
|
||||
# add dsc
|
||||
#
|
||||
AllWorkSpaceMetaFiles.add(self.MetaFile.Path)
|
||||
|
||||
#
|
||||
# add build_rule.txt & tools_def.txt
|
||||
#
|
||||
AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))
|
||||
AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))
|
||||
|
||||
# add BuildOption metafile
|
||||
#
|
||||
AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))
|
||||
|
||||
# add PcdToken Number file for Dynamic/DynamicEx Pcd
|
||||
#
|
||||
AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))
|
||||
|
||||
for Pa in self.AutoGenObjectList:
|
||||
AllWorkSpaceMetaFiles.add(Pa.ToolDefinitionFile)
|
||||
|
||||
for Arch in self.ArchList:
|
||||
#
|
||||
# add dec
|
||||
#
|
||||
for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:
|
||||
AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)
|
||||
|
||||
#
|
||||
# add included dsc
|
||||
#
|
||||
for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:
|
||||
AllWorkSpaceMetaFiles.add(filePath.Path)
|
||||
|
||||
return AllWorkSpaceMetaFiles
|
||||
|
||||
def _CheckPcdDefineAndType(self):
|
||||
PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
|
||||
TAB_PCDS_PATCHABLE_IN_MODULE,
|
||||
TAB_PCDS_FEATURE_FLAG,
|
||||
TAB_PCDS_DYNAMIC,
|
||||
TAB_PCDS_DYNAMIC_EX}
|
||||
|
||||
# This dict store PCDs which are not used by any modules with specified arches
|
||||
UnusedPcd = OrderedDict()
|
||||
for Pa in self.AutoGenObjectList:
|
||||
# Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
|
||||
for Pcd in Pa.Platform.Pcds:
|
||||
PcdType = Pa.Platform.Pcds[Pcd].Type
|
||||
|
||||
# If no PCD type, this PCD comes from FDF
|
||||
if not PcdType:
|
||||
continue
|
||||
|
||||
# Try to remove Hii and Vpd suffix
|
||||
if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
|
||||
PcdType = TAB_PCDS_DYNAMIC_EX
|
||||
elif PcdType.startswith(TAB_PCDS_DYNAMIC):
|
||||
PcdType = TAB_PCDS_DYNAMIC
|
||||
|
||||
for Package in Pa.PackageList:
|
||||
# Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
|
||||
if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
|
||||
break
|
||||
for Type in PcdTypeSet:
|
||||
if (Pcd[0], Pcd[1], Type) in Package.Pcds:
|
||||
EdkLogger.error(
|
||||
'build',
|
||||
FORMAT_INVALID,
|
||||
"Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
|
||||
% (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
|
||||
ExtraData=None
|
||||
)
|
||||
return
|
||||
else:
|
||||
UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)
|
||||
|
||||
for Pcd in UnusedPcd:
|
||||
EdkLogger.warn(
|
||||
'build',
|
||||
"The PCD was not specified by any INF module in the platform for the given architecture.\n"
|
||||
"\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
|
||||
% (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
|
||||
ExtraData=None
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))
|
||||
|
||||
## Return the directory to store FV files
@cached_property
def FvDir(self):
    """Path of <BuildDir>/<FV directory> where firmware volumes are placed."""
    return path.join(self.BuildDir, TAB_FV_DIRECTORY)
|
||||
|
||||
## Return the directory to store all intermediate and final files built
@cached_property
def BuildDir(self):
    """Delegates to the first per-arch platform AutoGen (same for all arches)."""
    return self.AutoGenObjectList[0].BuildDir
|
||||
|
||||
## Return the build output directory platform specifies
@cached_property
def OutputDir(self):
    """OUTPUT_DIRECTORY declared by the active platform DSC."""
    return self.Platform.OutputDirectory
|
||||
|
||||
## Return platform name
@cached_property
def Name(self):
    """PLATFORM_NAME from the active platform DSC."""
    return self.Platform.PlatformName
|
||||
|
||||
## Return meta-file GUID
@cached_property
def Guid(self):
    """PLATFORM_GUID from the active platform DSC."""
    return self.Platform.Guid
|
||||
|
||||
## Return platform version
@cached_property
def Version(self):
    """PLATFORM_VERSION from the active platform DSC."""
    return self.Platform.Version
|
||||
|
||||
## Return paths of tools
@cached_property
def ToolDefinition(self):
    """Tool definitions resolved by the first per-arch platform AutoGen."""
    return self.AutoGenObjectList[0].ToolDefinition
|
||||
|
||||
## Return directory of platform makefile
#
#   @retval     string  Makefile directory
#
@cached_property
def MakeFileDir(self):
    # The top-level makefile lives directly in the build directory.
    return self.BuildDir
|
||||
|
||||
## Return build command string
#
#   @retval     string  Build command string
#
@cached_property
def BuildCommand(self):
    # BuildCommand should be all the same. So just get one from platform AutoGen
    return self.AutoGenObjectList[0].BuildCommand
|
||||
|
||||
## Check the PCDs token value conflict in each DEC file.
#
# Will cause build break and raise error message while two PCDs conflict.
#
# @return  None
#
def _CheckAllPcdsTokenValueConflict(self):
    """Two passes per package: (1) PCDs sorted by token value — different
    PCDs in the same token space must not share a value (mixed PCDs are
    exempt); (2) PCDs sorted by name — the same PCD declared twice must
    keep the same token value."""
    for Pa in self.AutoGenObjectList:
        for Package in Pa.PackageList:
            PcdList = list(Package.Pcds.values())
            PcdList.sort(key=lambda x: int(x.TokenValue, 0))
            Count = 0
            while (Count < len(PcdList) - 1) :
                Item = PcdList[Count]
                ItemNext = PcdList[Count + 1]
                #
                # Make sure in the same token space the TokenValue should be unique
                #
                if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
                    # Gather the whole run of PCDs that share this token value.
                    SameTokenValuePcdList = []
                    SameTokenValuePcdList.append(Item)
                    SameTokenValuePcdList.append(ItemNext)
                    RemainPcdListLength = len(PcdList) - Count - 2
                    for ValueSameCount in range(RemainPcdListLength):
                        if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
                            SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
                        else:
                            break;
                    #
                    # Sort same token value PCD list with TokenGuid and TokenCName
                    #
                    SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
                    SameTokenValuePcdListCount = 0
                    while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
                        Flag = False
                        TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
                        TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]

                        if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
                            # Mixed PCDs deliberately share a token value; skip them.
                            for PcdItem in GlobalData.MixedPcd:
                                if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
                                    (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                                    Flag = True
                            if not Flag:
                                EdkLogger.error(
                                            'build',
                                            FORMAT_INVALID,
                                            "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
                                            % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
                                            ExtraData=None
                                            )
                        SameTokenValuePcdListCount += 1
                    # Skip past the run we just examined.
                    Count += SameTokenValuePcdListCount
                Count += 1

            PcdList = list(Package.Pcds.values())
            PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
            Count = 0
            while (Count < len(PcdList) - 1) :
                Item = PcdList[Count]
                ItemNext = PcdList[Count + 1]
                #
                # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
                #
                if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
                    EdkLogger.error(
                                'build',
                                FORMAT_INVALID,
                                "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
                                % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
                                ExtraData=None
                                )
                Count += 1
|
||||
## Generate fds command
@property
def GenFdsCommand(self):
    """Render the GenFds command line from the top-level makefile template.

    Builds the TopLevelMakefile object once (the original constructed it
    twice — once for the template and once for the substitution dict).
    """
    TopMakefile = GenMake.TopLevelMakefile(self)
    return TopMakefile._TEMPLATE_.Replace(TopMakefile._TemplateDict).strip()
|
||||
|
||||
@property
|
||||
def GenFdsCommandDict(self):
|
||||
FdsCommandDict = {}
|
||||
LogLevel = EdkLogger.GetLevel()
|
||||
if LogLevel == EdkLogger.VERBOSE:
|
||||
FdsCommandDict["verbose"] = True
|
||||
elif LogLevel <= EdkLogger.DEBUG_9:
|
||||
FdsCommandDict["debug"] = LogLevel - 1
|
||||
elif LogLevel == EdkLogger.QUIET:
|
||||
FdsCommandDict["quiet"] = True
|
||||
|
||||
if GlobalData.gEnableGenfdsMultiThread:
|
||||
FdsCommandDict["GenfdsMultiThread"] = True
|
||||
if GlobalData.gIgnoreSource:
|
||||
FdsCommandDict["IgnoreSources"] = True
|
||||
|
||||
FdsCommandDict["OptionPcd"] = []
|
||||
for pcd in GlobalData.BuildOptionPcd:
|
||||
if pcd[2]:
|
||||
pcdname = '.'.join(pcd[0:3])
|
||||
else:
|
||||
pcdname = '.'.join(pcd[0:2])
|
||||
if pcd[3].startswith('{'):
|
||||
FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')
|
||||
else:
|
||||
FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])
|
||||
|
||||
MacroList = []
|
||||
# macros passed to GenFds
|
||||
MacroDict = {}
|
||||
MacroDict.update(GlobalData.gGlobalDefines)
|
||||
MacroDict.update(GlobalData.gCommandLineDefines)
|
||||
for MacroName in MacroDict:
|
||||
if MacroDict[MacroName] != "":
|
||||
MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
|
||||
else:
|
||||
MacroList.append('"%s"' % MacroName)
|
||||
FdsCommandDict["macro"] = MacroList
|
||||
|
||||
FdsCommandDict["fdf_file"] = [self.FdfFile]
|
||||
FdsCommandDict["build_target"] = self.BuildTarget
|
||||
FdsCommandDict["toolchain_tag"] = self.ToolChain
|
||||
FdsCommandDict["active_platform"] = str(self)
|
||||
|
||||
FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory
|
||||
FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)
|
||||
FdsCommandDict["platform_build_directory"] = self.BuildDir
|
||||
|
||||
FdsCommandDict["fd"] = self.FdTargetList
|
||||
FdsCommandDict["fv"] = self.FvTargetList
|
||||
FdsCommandDict["cap"] = self.CapTargetList
|
||||
return FdsCommandDict
|
||||
|
||||
## Create makefile for the platform and modules in it
|
||||
#
|
||||
# @param CreateDepsMakeFile Flag indicating if the makefile for
|
||||
# modules will be created as well
|
||||
#
|
||||
def CreateMakeFile(self, CreateDepsMakeFile=False):
|
||||
if not CreateDepsMakeFile:
|
||||
return
|
||||
for Pa in self.AutoGenObjectList:
|
||||
Pa.CreateMakeFile(True)
|
||||
|
||||
## Create autogen code for platform and modules
|
||||
#
|
||||
# Since there's no autogen code for platform, this method will do nothing
|
||||
# if CreateModuleCodeFile is set to False.
|
||||
#
|
||||
# @param CreateDepsCodeFile Flag indicating if creating module's
|
||||
# autogen code file or not
|
||||
#
|
||||
def CreateCodeFile(self, CreateDepsCodeFile=False):
|
||||
if not CreateDepsCodeFile:
|
||||
return
|
||||
for Pa in self.AutoGenObjectList:
|
||||
Pa.CreateCodeFile(True)
|
||||
|
||||
## Create AsBuilt INF file the platform
|
||||
#
|
||||
def CreateAsBuiltInf(self):
|
||||
return
|
||||
|
|
@ -649,7 +649,6 @@ def GuidValue(CName, PackageList, Inffile = None):
|
|||
if CName in GuidKeys:
|
||||
return P.Guids[CName]
|
||||
return None
|
||||
return None
|
||||
|
||||
## A string template class
|
||||
#
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
import Common.LongFilePathOs as os
|
||||
from Common.LongFilePathSupport import OpenLongFilePath as open
|
||||
import sys
|
||||
import re
|
||||
|
||||
from optparse import OptionParser
|
||||
from optparse import make_option
|
||||
|
|
|
@ -1373,11 +1373,11 @@ class DscBuildData(PlatformBuildClassObject):
|
|||
self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG],
|
||||
self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
|
||||
self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
|
||||
self.Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
|
||||
self.Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid, Name)][0]
|
||||
self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
|
||||
self._Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid, Name)][0]
|
||||
if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
|
||||
self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
|
||||
self.Pcds[Name, Guid].SkuInfoList = {TAB_DEFAULT:SkuInfoClass(TAB_DEFAULT, self.SkuIds[TAB_DEFAULT][0], '', '', '', '', '', NoFiledValues[( Guid, Name)][0])}
|
||||
self._Pcds[Name, Guid].SkuInfoList = {TAB_DEFAULT:SkuInfoClass(TAB_DEFAULT, self.SkuIds[TAB_DEFAULT][0], '', '', '', '', '', NoFiledValues[( Guid, Name)][0])}
|
||||
return AllPcds
|
||||
|
||||
def OverrideByFdfOverAll(self,AllPcds):
|
||||
|
@ -1419,8 +1419,8 @@ class DscBuildData(PlatformBuildClassObject):
|
|||
if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
|
||||
self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
|
||||
self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG]]:
|
||||
self.Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
|
||||
self.Pcds[Name, Guid].DefaultValue = Value
|
||||
self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
|
||||
self._Pcds[Name, Guid].DefaultValue = Value
|
||||
return AllPcds
|
||||
|
||||
def ParsePcdNameStruct(self,NamePart1,NamePart2):
|
||||
|
|
|
@ -154,6 +154,13 @@ class InfBuildData(ModuleBuildClassObject):
|
|||
self._PcdComments = None
|
||||
self._BuildOptions = None
|
||||
self._DependencyFileList = None
|
||||
self.LibInstances = []
|
||||
self.ReferenceModules = set()
|
||||
self.Guids
|
||||
self.Pcds
|
||||
def SetReferenceModule(self,Module):
|
||||
self.ReferenceModules.add(Module)
|
||||
return self
|
||||
|
||||
## XXX[key] = value
|
||||
def __setitem__(self, key, value):
|
||||
|
@ -705,6 +712,25 @@ class InfBuildData(ModuleBuildClassObject):
|
|||
return RetVal
|
||||
|
||||
@cached_property
|
||||
def ModulePcdList(self):
|
||||
RetVal = self.Pcds
|
||||
return RetVal
|
||||
@cached_property
|
||||
def LibraryPcdList(self):
|
||||
if bool(self.LibraryClass):
|
||||
return []
|
||||
RetVal = {}
|
||||
Pcds = set()
|
||||
for Library in self.LibInstances:
|
||||
PcdsInLibrary = OrderedDict()
|
||||
for Key in Library.Pcds:
|
||||
if Key in self.Pcds or Key in Pcds:
|
||||
continue
|
||||
Pcds.add(Key)
|
||||
PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
|
||||
RetVal[Library] = PcdsInLibrary
|
||||
return RetVal
|
||||
@cached_property
|
||||
def PcdsName(self):
|
||||
PcdsName = set()
|
||||
for Type in (MODEL_PCD_FIXED_AT_BUILD,MODEL_PCD_PATCHABLE_IN_MODULE,MODEL_PCD_FEATURE_FLAG,MODEL_PCD_DYNAMIC,MODEL_PCD_DYNAMIC_EX):
|
||||
|
@ -1030,3 +1056,6 @@ class InfBuildData(ModuleBuildClassObject):
|
|||
if (self.Binaries and not self.Sources) or GlobalData.gIgnoreSource:
|
||||
return True
|
||||
return False
|
||||
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
|
||||
for Key in CopyFromDict:
|
||||
CopyToDict[Key].extend(CopyFromDict[Key])
|
||||
|
|
|
@ -88,6 +88,8 @@ def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchai
|
|||
return GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain)
|
||||
|
||||
def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain, FileName = '', EdkLogger = None):
|
||||
if Module.LibInstances:
|
||||
return Module.LibInstances
|
||||
ModuleType = Module.ModuleType
|
||||
|
||||
# add forced library instances (specified under LibraryClasses sections)
|
||||
|
@ -246,4 +248,6 @@ def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolcha
|
|||
# The DAG Topo sort produces the destructor order, so the list of constructors must generated in the reverse order
|
||||
#
|
||||
SortedLibraryList.reverse()
|
||||
Module.LibInstances = SortedLibraryList
|
||||
SortedLibraryList = [lib.SetReferenceModule(Module) for lib in SortedLibraryList]
|
||||
return SortedLibraryList
|
||||
|
|
|
@ -62,6 +62,8 @@ class WorkspaceDatabase(object):
|
|||
}
|
||||
|
||||
_CACHE_ = {} # (FilePath, Arch) : <object>
|
||||
def GetCache(self):
|
||||
return self._CACHE_
|
||||
|
||||
# constructor
|
||||
def __init__(self, WorkspaceDb):
|
||||
|
@ -203,6 +205,7 @@ class WorkspaceDatabase(object):
|
|||
EdkLogger.error('build', PARSER_ERROR, "Failed to parser DSC file: %s" % Dscfile)
|
||||
return Platform
|
||||
|
||||
BuildDB = WorkspaceDatabase()
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
|
|
|
@ -34,7 +34,7 @@ from Common.BuildToolError import FORMAT_INVALID
|
|||
from Common.LongFilePathSupport import OpenLongFilePath as open
|
||||
from Common.MultipleWorkspace import MultipleWorkspace as mws
|
||||
import Common.GlobalData as GlobalData
|
||||
from AutoGen.AutoGen import ModuleAutoGen
|
||||
from AutoGen.ModuleAutoGen import ModuleAutoGen
|
||||
from Common.Misc import PathClass
|
||||
from Common.StringUtils import NormPath
|
||||
from Common.DataType import *
|
||||
|
@ -2142,7 +2142,7 @@ class PlatformReport(object):
|
|||
INFList = GlobalData.gFdfParser.Profile.InfDict[Pa.Arch]
|
||||
for InfName in INFList:
|
||||
InfClass = PathClass(NormPath(InfName), Wa.WorkspaceDir, Pa.Arch)
|
||||
Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile)
|
||||
Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile,Pa.DataPile)
|
||||
if Ma is None:
|
||||
continue
|
||||
if Ma not in ModuleAutoGenList:
|
||||
|
|
|
@ -12,42 +12,45 @@
|
|||
# Import Modules
|
||||
#
|
||||
from __future__ import print_function
|
||||
import Common.LongFilePathOs as os
|
||||
import re
|
||||
from __future__ import absolute_import
|
||||
import os.path as path
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import glob
|
||||
import time
|
||||
import platform
|
||||
import traceback
|
||||
import encodings.ascii
|
||||
import multiprocessing
|
||||
|
||||
from struct import *
|
||||
from threading import *
|
||||
from threading import Thread,Event,BoundedSemaphore
|
||||
import threading
|
||||
from subprocess import Popen,PIPE
|
||||
from collections import OrderedDict, defaultdict
|
||||
from optparse import OptionParser
|
||||
from subprocess import *
|
||||
from AutoGen.PlatformAutoGen import PlatformAutoGen
|
||||
from AutoGen.ModuleAutoGen import ModuleAutoGen
|
||||
from AutoGen.WorkspaceAutoGen import WorkspaceAutoGen
|
||||
from AutoGen import GenMake
|
||||
from Common import Misc as Utils
|
||||
|
||||
from Common.LongFilePathSupport import OpenLongFilePath as open
|
||||
from Common.TargetTxtClassObject import TargetTxt
|
||||
from Common.ToolDefClassObject import ToolDef
|
||||
from Common.DataType import *
|
||||
from Common.BuildVersion import gBUILD_VERSION
|
||||
from AutoGen.AutoGen import *
|
||||
from Common.BuildToolError import *
|
||||
from Workspace.WorkspaceDatabase import WorkspaceDatabase
|
||||
from Common.Misc import PathClass,SaveFileOnChange,RemoveDirectory
|
||||
from Common.StringUtils import NormPath
|
||||
from Common.MultipleWorkspace import MultipleWorkspace as mws
|
||||
from Common.BuildToolError import *
|
||||
from Common.DataType import *
|
||||
import Common.EdkLogger as EdkLogger
|
||||
from Common.BuildVersion import gBUILD_VERSION
|
||||
from Workspace.WorkspaceDatabase import BuildDB
|
||||
|
||||
from BuildReport import BuildReport
|
||||
from GenPatchPcdTable.GenPatchPcdTable import *
|
||||
from PatchPcdValue.PatchPcdValue import *
|
||||
from GenPatchPcdTable.GenPatchPcdTable import PeImageClass,parsePcdInfoFromMapFile
|
||||
from PatchPcdValue.PatchPcdValue import PatchBinaryFile
|
||||
|
||||
import Common.EdkLogger
|
||||
import Common.GlobalData as GlobalData
|
||||
from GenFds.GenFds import GenFds, GenFdsApi
|
||||
|
||||
from collections import OrderedDict, defaultdict
|
||||
|
||||
# Version and Copyright
|
||||
VersionNumber = "0.60" + ' ' + gBUILD_VERSION
|
||||
|
@ -775,7 +778,7 @@ class Build():
|
|||
GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
|
||||
if not os.path.exists(os.path.join(GlobalData.gConfDirectory, '.cache')):
|
||||
os.makedirs(os.path.join(GlobalData.gConfDirectory, '.cache'))
|
||||
self.Db = WorkspaceDatabase()
|
||||
self.Db = BuildDB
|
||||
self.BuildDatabase = self.Db.BuildObject
|
||||
self.Platform = None
|
||||
self.ToolChainFamily = None
|
||||
|
@ -1700,13 +1703,17 @@ class Build():
|
|||
CmdListDict = self._GenFfsCmd(Wa.ArchList)
|
||||
|
||||
for Arch in Wa.ArchList:
|
||||
PcdMaList = []
|
||||
GlobalData.gGlobalDefines['ARCH'] = Arch
|
||||
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
|
||||
for Module in Pa.Platform.Modules:
|
||||
# Get ModuleAutoGen object to generate C code file and makefile
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
|
||||
if Ma is None:
|
||||
continue
|
||||
if Ma.PcdIsDriver:
|
||||
Ma.PlatformInfo = Pa
|
||||
PcdMaList.append(Ma)
|
||||
self.BuildModules.append(Ma)
|
||||
self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict)
|
||||
|
||||
|
@ -1802,7 +1809,7 @@ class Build():
|
|||
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
|
||||
for Module in Pa.Platform.Modules:
|
||||
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
|
||||
if Ma is None:
|
||||
continue
|
||||
MaList.append(Ma)
|
||||
|
@ -1982,6 +1989,7 @@ class Build():
|
|||
ExitFlag.clear()
|
||||
self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
|
||||
for Arch in Wa.ArchList:
|
||||
PcdMaList = []
|
||||
AutoGenStart = time.time()
|
||||
GlobalData.gGlobalDefines['ARCH'] = Arch
|
||||
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
|
||||
|
@ -1999,10 +2007,13 @@ class Build():
|
|||
ModuleList.append(Inf)
|
||||
for Module in ModuleList:
|
||||
# Get ModuleAutoGen object to generate C code file and makefile
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
|
||||
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
|
||||
|
||||
if Ma is None:
|
||||
continue
|
||||
if Ma.PcdIsDriver:
|
||||
Ma.PlatformInfo = Pa
|
||||
PcdMaList.append(Ma)
|
||||
if Ma.CanSkipbyHash():
|
||||
self.HashSkipModules.append(Ma)
|
||||
if GlobalData.gBinCacheSource:
|
||||
|
|
Loading…
Reference in New Issue