mirror of https://github.com/acidanthera/audk.git
BaseTools: Remove equality operator with None
replace "== None" with "is None" and "!= None" with "is not None" Cc: Yonghong Zhu <yonghong.zhu@intel.com> Cc: Liming Gao <liming.gao@intel.com> Contributed-under: TianoCore Contribution Agreement 1.1 Signed-off-by: Jaben Carsey <jaben.carsey@intel.com> Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
This commit is contained in:
parent
05a32984ab
commit
4231a8193e
|
@ -49,18 +49,18 @@ def parseCmdArgs():
|
|||
|
||||
# validate the options
|
||||
errors = []
|
||||
if options.WorkspacePath == None:
|
||||
if options.WorkspacePath is None:
|
||||
errors.append('- Please specify workspace path via option -w!')
|
||||
elif not os.path.exists(options.WorkspacePath):
|
||||
errors.append("- Invalid workspace path %s! The workspace path should be exist in absolute path!" % options.WorkspacePath)
|
||||
|
||||
if options.PackagePath == None:
|
||||
if options.PackagePath is None:
|
||||
errors.append('- Please specify package DEC file path via option -p!')
|
||||
elif not os.path.exists(options.PackagePath):
|
||||
errors.append("- Invalid package's DEC file path %s! The DEC path should be exist in absolute path!" % options.PackagePath)
|
||||
|
||||
default = "C:\\Program Files\\doxygen\\bin\\doxygen.exe"
|
||||
if options.DoxygenPath == None:
|
||||
if options.DoxygenPath is None:
|
||||
if os.path.exists(default):
|
||||
print "Warning: Assume doxygen tool is installed at %s. If not, please specify via -x" % default
|
||||
options.DoxygenPath = default
|
||||
|
@ -69,7 +69,7 @@ def parseCmdArgs():
|
|||
elif not os.path.exists(options.DoxygenPath):
|
||||
errors.append("- Invalid doxygen tool path %s! The doxygen tool path should be exist in absolute path!" % options.DoxygenPath)
|
||||
|
||||
if options.OutputPath != None:
|
||||
if options.OutputPath is not None:
|
||||
if not os.path.exists(options.OutputPath):
|
||||
# create output
|
||||
try:
|
||||
|
@ -77,7 +77,7 @@ def parseCmdArgs():
|
|||
except:
|
||||
errors.append('- Fail to create the output directory %s' % options.OutputPath)
|
||||
else:
|
||||
if options.PackagePath != None and os.path.exists(options.PackagePath):
|
||||
if options.PackagePath is not None and os.path.exists(options.PackagePath):
|
||||
dirpath = os.path.dirname(options.PackagePath)
|
||||
default = os.path.join (dirpath, "Document")
|
||||
print 'Warning: Assume document output at %s. If not, please specify via option -o' % default
|
||||
|
@ -90,21 +90,21 @@ def parseCmdArgs():
|
|||
else:
|
||||
errors.append('- Please specify document output path via option -o!')
|
||||
|
||||
if options.Arch == None:
|
||||
if options.Arch is None:
|
||||
options.Arch = 'ALL'
|
||||
print "Warning: Assume arch is \"ALL\". If not, specify via -a"
|
||||
|
||||
if options.DocumentMode == None:
|
||||
if options.DocumentMode is None:
|
||||
options.DocumentMode = "HTML"
|
||||
print "Warning: Assume document mode is \"HTML\". If not, specify via -m"
|
||||
|
||||
if options.IncludeOnly == None:
|
||||
if options.IncludeOnly is None:
|
||||
options.IncludeOnly = False
|
||||
print "Warning: Assume generate package document for all package\'s source including publich interfaces and implementation libraries and modules."
|
||||
|
||||
if options.DocumentMode.lower() == 'chm':
|
||||
default = "C:\\Program Files\\HTML Help Workshop\\hhc.exe"
|
||||
if options.HtmlWorkshopPath == None:
|
||||
if options.HtmlWorkshopPath is None:
|
||||
if os.path.exists(default):
|
||||
print 'Warning: Assume the installation path of Microsoft HTML Workshop is %s. If not, specify via option -c.' % default
|
||||
options.HtmlWorkshopPath = default
|
||||
|
@ -382,7 +382,7 @@ if __name__ == '__main__':
|
|||
|
||||
# create package model object firstly
|
||||
pkgObj = createPackageObject(wspath, pkgpath)
|
||||
if pkgObj == None:
|
||||
if pkgObj is None:
|
||||
sys.exit(-1)
|
||||
|
||||
# create doxygen action model
|
||||
|
|
|
@ -58,7 +58,7 @@ class Page(BaseDoxygeItem):
|
|||
return subpage
|
||||
|
||||
def AddPages(self, pageArray):
|
||||
if pageArray == None:
|
||||
if pageArray is None:
|
||||
return
|
||||
for page in pageArray:
|
||||
self.AddPage(page)
|
||||
|
@ -370,7 +370,7 @@ class DoxygenConfigFile:
|
|||
self.mWarningFile = str.replace('\\', '/')
|
||||
|
||||
def FileExists(self, path):
|
||||
if path == None:
|
||||
if path is None:
|
||||
return False
|
||||
if len(path) == 0:
|
||||
return False
|
||||
|
@ -382,7 +382,7 @@ class DoxygenConfigFile:
|
|||
return False
|
||||
|
||||
def AddFile(self, path):
|
||||
if path == None:
|
||||
if path is None:
|
||||
return
|
||||
|
||||
if len(path) == 0:
|
||||
|
|
|
@ -553,7 +553,7 @@ class EfiFvMapFile(object):
|
|||
if line[0] != ' ':
|
||||
# new entry
|
||||
ret = rMapEntry.match(line)
|
||||
if ret != None:
|
||||
if ret is not None:
|
||||
name = ret.groups()[0]
|
||||
baseaddr = int(ret.groups()[1], 16)
|
||||
entry = int(ret.groups()[2], 16)
|
||||
|
|
|
@ -34,7 +34,7 @@ class BaseINIFile(object):
|
|||
if key not in cls._objs.keys():
|
||||
cls._objs[key] = object.__new__(cls, *args, **kwargs)
|
||||
|
||||
if parent != None:
|
||||
if parent is not None:
|
||||
cls._objs[key].AddParent(parent)
|
||||
|
||||
return cls._objs[key]
|
||||
|
@ -47,7 +47,7 @@ class BaseINIFile(object):
|
|||
self._isModify = True
|
||||
|
||||
def AddParent(self, parent):
|
||||
if parent == None: return
|
||||
if parent is None: return
|
||||
if not hasattr(self, "_parents"):
|
||||
self._parents = []
|
||||
|
||||
|
@ -122,7 +122,7 @@ class BaseINIFile(object):
|
|||
continue
|
||||
|
||||
m = section_re.match(templine)
|
||||
if m!= None: # found a section
|
||||
if m is not None: # found a section
|
||||
inGlobal = False
|
||||
# Finish the latest section first
|
||||
if len(sObjs) != 0:
|
||||
|
@ -165,7 +165,7 @@ class BaseINIFile(object):
|
|||
def Destroy(self, parent):
|
||||
|
||||
# check referenced parent
|
||||
if parent != None:
|
||||
if parent is not None:
|
||||
assert parent in self._parents, "when destory ini object, can not found parent reference!"
|
||||
self._parents.remove(parent)
|
||||
|
||||
|
@ -307,7 +307,7 @@ class BaseINISection(object):
|
|||
visit += 1
|
||||
continue
|
||||
line = line.split('#')[0].strip()
|
||||
if iniObj != None:
|
||||
if iniObj is not None:
|
||||
if line.endswith('}'):
|
||||
iniObj._end = visit - self._start
|
||||
if not iniObj.Parse():
|
||||
|
|
|
@ -35,14 +35,14 @@ def WarnMsg(mess, fName=None, fNo=None):
|
|||
def NormalMessage(type, mess, fName=None, fNo=None):
|
||||
strMsg = type
|
||||
|
||||
if fName != None:
|
||||
if fName is not None:
|
||||
strMsg += ' %s' % fName.replace('/', '\\')
|
||||
if fNo != None:
|
||||
if fNo is not None:
|
||||
strMsg += '(%d):' % fNo
|
||||
else:
|
||||
strMsg += ' :'
|
||||
|
||||
if fName == None and fNo == None:
|
||||
if fName is None and fNo is None:
|
||||
strMsg += ' '
|
||||
strMsg += mess
|
||||
|
||||
|
|
|
@ -74,7 +74,7 @@ class SurfaceObject(object):
|
|||
|
||||
def Load(self, relativePath):
|
||||
# if has been loaded, directly return
|
||||
if self._fileObj != None: return True
|
||||
if self._fileObj is not None: return True
|
||||
|
||||
relativePath = os.path.normpath(relativePath)
|
||||
fullPath = os.path.join(self._workspace, relativePath)
|
||||
|
@ -160,7 +160,7 @@ class Platform(SurfaceObject):
|
|||
return dsc.DSCFile
|
||||
|
||||
def GetModuleCount(self):
|
||||
if self.GetFileObj() == None:
|
||||
if self.GetFileObj() is None:
|
||||
ErrorMsg("Fail to get module count because DSC file has not been load!")
|
||||
|
||||
return len(self.GetFileObj().GetComponents())
|
||||
|
@ -171,7 +171,7 @@ class Platform(SurfaceObject):
|
|||
def LoadModules(self, precallback=None, postcallback=None):
|
||||
for obj in self.GetFileObj().GetComponents():
|
||||
mFilename = obj.GetFilename()
|
||||
if precallback != None:
|
||||
if precallback is not None:
|
||||
precallback(self, mFilename)
|
||||
arch = obj.GetArch()
|
||||
if arch.lower() == 'common':
|
||||
|
@ -182,7 +182,7 @@ class Platform(SurfaceObject):
|
|||
module = Module(self, self.GetWorkspace())
|
||||
if module.Load(mFilename, arch, obj.GetOveridePcds(), obj.GetOverideLibs()):
|
||||
self._modules.append(module)
|
||||
if postcallback != None:
|
||||
if postcallback is not None:
|
||||
postcallback(self, module)
|
||||
else:
|
||||
del module
|
||||
|
@ -222,7 +222,7 @@ class Platform(SurfaceObject):
|
|||
for obj in objs:
|
||||
if obj.GetPcdName().lower() == name.lower():
|
||||
arr.append(obj)
|
||||
if arch != None:
|
||||
if arch is not None:
|
||||
arr = self.FilterObjsByArch(arr, arch)
|
||||
return arr
|
||||
|
||||
|
@ -292,7 +292,7 @@ class Platform(SurfaceObject):
|
|||
newSect = newDsc.AddNewSection(oldSect.GetName())
|
||||
for oldComObj in oldSect.GetObjects():
|
||||
module = self.GetModuleObject(oldComObj.GetFilename(), oldSect.GetArch())
|
||||
if module == None: continue
|
||||
if module is None: continue
|
||||
|
||||
newComObj = dsc.DSCComponentObject(newSect)
|
||||
newComObj.SetFilename(oldComObj.GetFilename())
|
||||
|
@ -300,7 +300,7 @@ class Platform(SurfaceObject):
|
|||
# add all library instance for override section
|
||||
libdict = module.GetLibraries()
|
||||
for libclass in libdict.keys():
|
||||
if libdict[libclass] != None:
|
||||
if libdict[libclass] is not None:
|
||||
newComObj.AddOverideLib(libclass, libdict[libclass].GetRelativeFilename().replace('\\', '/'))
|
||||
|
||||
# add all pcds for override section
|
||||
|
@ -338,7 +338,7 @@ class Module(SurfaceObject):
|
|||
|
||||
def Destroy(self):
|
||||
for lib in self._libs.values():
|
||||
if lib != None:
|
||||
if lib is not None:
|
||||
lib.Destroy()
|
||||
self._libs.clear()
|
||||
|
||||
|
@ -351,12 +351,12 @@ class Module(SurfaceObject):
|
|||
del self._ppis[:]
|
||||
|
||||
for protocol in self._protocols:
|
||||
if protocol != None:
|
||||
if protocol is not None:
|
||||
protocol.DeRef(self)
|
||||
del self._protocols[:]
|
||||
|
||||
for guid in self._guids:
|
||||
if guid != None:
|
||||
if guid is not None:
|
||||
guid.DeRef(self)
|
||||
del self._guids[:]
|
||||
|
||||
|
@ -375,9 +375,9 @@ class Module(SurfaceObject):
|
|||
return False
|
||||
|
||||
self._arch = arch
|
||||
if overidePcds != None:
|
||||
if overidePcds is not None:
|
||||
self._overideLibs = overideLibs
|
||||
if overideLibs != None:
|
||||
if overideLibs is not None:
|
||||
self._overidePcds = overidePcds
|
||||
|
||||
self._SearchLibraries()
|
||||
|
@ -403,7 +403,7 @@ class Module(SurfaceObject):
|
|||
def GetPcds(self):
|
||||
pcds = self._pcds.copy()
|
||||
for lib in self._libs.values():
|
||||
if lib == None: continue
|
||||
if lib is None: continue
|
||||
for name in lib._pcds.keys():
|
||||
pcds[name] = lib._pcds[name]
|
||||
return pcds
|
||||
|
@ -412,7 +412,7 @@ class Module(SurfaceObject):
|
|||
ppis = []
|
||||
ppis += self._ppis
|
||||
for lib in self._libs.values():
|
||||
if lib == None: continue
|
||||
if lib is None: continue
|
||||
ppis += lib._ppis
|
||||
return ppis
|
||||
|
||||
|
@ -420,7 +420,7 @@ class Module(SurfaceObject):
|
|||
pros = []
|
||||
pros = self._protocols
|
||||
for lib in self._libs.values():
|
||||
if lib == None: continue
|
||||
if lib is None: continue
|
||||
pros += lib._protocols
|
||||
return pros
|
||||
|
||||
|
@ -428,7 +428,7 @@ class Module(SurfaceObject):
|
|||
guids = []
|
||||
guids += self._guids
|
||||
for lib in self._libs.values():
|
||||
if lib == None: continue
|
||||
if lib is None: continue
|
||||
guids += lib._guids
|
||||
return guids
|
||||
|
||||
|
@ -436,12 +436,12 @@ class Module(SurfaceObject):
|
|||
deps = []
|
||||
deps += self._depexs
|
||||
for lib in self._libs.values():
|
||||
if lib == None: continue
|
||||
if lib is None: continue
|
||||
deps += lib._depexs
|
||||
return deps
|
||||
|
||||
def IsLibrary(self):
|
||||
return self.GetFileObj().GetDefine("LIBRARY_CLASS") != None
|
||||
return self.GetFileObj().GetDefine("LIBRARY_CLASS") is not None
|
||||
|
||||
def GetLibraryInstance(self, classname, arch, type):
|
||||
if classname not in self._libs.keys():
|
||||
|
@ -454,7 +454,7 @@ class Module(SurfaceObject):
|
|||
parent = self.GetParent()
|
||||
if issubclass(parent.__class__, Platform):
|
||||
path = parent.GetLibraryPath(classname, arch, type)
|
||||
if path == None:
|
||||
if path is None:
|
||||
ErrorMsg('Fail to get library instance for %s' % classname, self.GetFilename())
|
||||
return None
|
||||
self._libs[classname] = Library(self, self.GetWorkspace())
|
||||
|
@ -477,7 +477,7 @@ class Module(SurfaceObject):
|
|||
continue
|
||||
classname = obj.GetClass()
|
||||
instance = self.GetLibraryInstance(classname, arch, type)
|
||||
if not self.IsLibrary() and instance != None:
|
||||
if not self.IsLibrary() and instance is not None:
|
||||
instance._isInherit = False
|
||||
|
||||
if classname not in self._libs.keys():
|
||||
|
@ -490,7 +490,7 @@ class Module(SurfaceObject):
|
|||
pros = []
|
||||
deps = []
|
||||
guids = []
|
||||
if self.GetFileObj() != None:
|
||||
if self.GetFileObj() is not None:
|
||||
pcds = self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('pcd'),
|
||||
self.GetArch())
|
||||
for pcd in pcds:
|
||||
|
@ -534,31 +534,31 @@ class Module(SurfaceObject):
|
|||
objs = self.GetFileObj().GetSectionObjectsByName('packages')
|
||||
for obj in objs:
|
||||
package = self.GetPlatform().GetPackage(obj.GetPath())
|
||||
if package != None:
|
||||
if package is not None:
|
||||
self._packages.append(package)
|
||||
|
||||
def GetPackages(self):
|
||||
return self._packages
|
||||
|
||||
def GetPcdObjects(self):
|
||||
if self.GetFileObj() == None:
|
||||
if self.GetFileObj() is None:
|
||||
return []
|
||||
|
||||
return self.GetFileObj().GetSectionObjectsByName('pcd')
|
||||
|
||||
def GetLibraryClassHeaderFilePath(self):
|
||||
lcname = self.GetFileObj().GetProduceLibraryClass()
|
||||
if lcname == None: return None
|
||||
if lcname is None: return None
|
||||
|
||||
pkgs = self.GetPackages()
|
||||
for package in pkgs:
|
||||
path = package.GetLibraryClassHeaderPathByName(lcname)
|
||||
if path != None:
|
||||
if path is not None:
|
||||
return os.path.realpath(os.path.join(package.GetFileObj().GetPackageRootPath(), path))
|
||||
return None
|
||||
|
||||
def Reload(self, force=False, callback=None):
|
||||
if callback != None:
|
||||
if callback is not None:
|
||||
callback(self, "Starting reload...")
|
||||
|
||||
ret = SurfaceObject.Reload(self, force)
|
||||
|
@ -568,7 +568,7 @@ class Module(SurfaceObject):
|
|||
return True
|
||||
|
||||
for lib in self._libs.values():
|
||||
if lib != None:
|
||||
if lib is not None:
|
||||
lib.Destroy()
|
||||
self._libs.clear()
|
||||
|
||||
|
@ -591,13 +591,13 @@ class Module(SurfaceObject):
|
|||
del self._packages[:]
|
||||
del self._depexs[:]
|
||||
|
||||
if callback != None:
|
||||
if callback is not None:
|
||||
callback(self, "Searching libraries...")
|
||||
self._SearchLibraries()
|
||||
if callback != None:
|
||||
if callback is not None:
|
||||
callback(self, "Searching packages...")
|
||||
self._SearchPackage()
|
||||
if callback != None:
|
||||
if callback is not None:
|
||||
callback(self, "Searching surface items...")
|
||||
self._SearchSurfaceItems()
|
||||
|
||||
|
@ -665,16 +665,16 @@ class Package(SurfaceObject):
|
|||
|
||||
def Destroy(self):
|
||||
for pcd in self._pcds.values():
|
||||
if pcd != None:
|
||||
if pcd is not None:
|
||||
pcd.Destroy()
|
||||
for guid in self._guids.values():
|
||||
if guid != None:
|
||||
if guid is not None:
|
||||
guid.Destroy()
|
||||
for protocol in self._protocols.values():
|
||||
if protocol != None:
|
||||
if protocol is not None:
|
||||
protocol.Destroy()
|
||||
for ppi in self._ppis.values():
|
||||
if ppi != None:
|
||||
if ppi is not None:
|
||||
ppi.Destroy()
|
||||
self._pcds.clear()
|
||||
self._guids.clear()
|
||||
|
@ -689,7 +689,7 @@ class Package(SurfaceObject):
|
|||
pcds = self.GetFileObj().GetSectionObjectsByName('pcds')
|
||||
for pcd in pcds:
|
||||
if pcd.GetPcdName() in self._pcds.keys():
|
||||
if self._pcds[pcd.GetPcdName()] != None:
|
||||
if self._pcds[pcd.GetPcdName()] is not None:
|
||||
self._pcds[pcd.GetPcdName()].AddDecObj(pcd)
|
||||
else:
|
||||
self._pcds[pcd.GetPcdName()] = PcdItem(pcd.GetPcdName(), self, pcd)
|
||||
|
@ -726,7 +726,7 @@ class Package(SurfaceObject):
|
|||
def GetPcdDefineObjs(self, name=None):
|
||||
arr = []
|
||||
objs = self.GetFileObj().GetSectionObjectsByName('pcds')
|
||||
if name == None: return objs
|
||||
if name is None: return objs
|
||||
|
||||
for obj in objs:
|
||||
if obj.GetPcdName().lower() == name.lower():
|
||||
|
@ -772,7 +772,7 @@ class ModulePcd(object):
|
|||
|
||||
def __init__(self, parent, name, infObj, pcdItem):
|
||||
assert issubclass(parent.__class__, Module), "Module's PCD's parent must be module!"
|
||||
assert pcdItem != None, 'Pcd %s does not in some package!' % name
|
||||
assert pcdItem is not None, 'Pcd %s does not in some package!' % name
|
||||
|
||||
self._name = name
|
||||
self._parent = parent
|
||||
|
|
|
@ -77,7 +77,7 @@ class DECSection(ini.BaseINISection):
|
|||
return arr[1]
|
||||
|
||||
def IsArchMatch(self, arch):
|
||||
if arch == None or self.GetArch() == 'common':
|
||||
if arch is None or self.GetArch() == 'common':
|
||||
return True
|
||||
|
||||
if self.GetArch().lower() != arch.lower():
|
||||
|
|
|
@ -69,7 +69,7 @@ class DoxygenAction:
|
|||
self._chmCallback = None
|
||||
|
||||
def Log(self, message, level='info'):
|
||||
if self._log != None:
|
||||
if self._log is not None:
|
||||
self._log(message, level)
|
||||
|
||||
def IsVerbose(self):
|
||||
|
@ -94,7 +94,7 @@ class DoxygenAction:
|
|||
|
||||
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
|
||||
indexPagePath = self.GenerateIndexPage()
|
||||
if indexPagePath == None:
|
||||
if indexPagePath is None:
|
||||
self.Log("Fail to generate index page!\n", 'error')
|
||||
return False
|
||||
else:
|
||||
|
@ -109,7 +109,7 @@ class DoxygenAction:
|
|||
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
|
||||
|
||||
# launch doxygen tool to generate document
|
||||
if self._doxygenCallback != None:
|
||||
if self._doxygenCallback is not None:
|
||||
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
|
||||
if not self._doxygenCallback(self._doxPath, configFilePath):
|
||||
return False
|
||||
|
@ -166,9 +166,9 @@ class PackageDocumentAction(DoxygenAction):
|
|||
self._configFile.AddPreDefined('MDE_CPU_ARM')
|
||||
|
||||
namestr = self._pObj.GetName()
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
namestr += '[%s]' % self._arch
|
||||
if self._tooltag != None:
|
||||
if self._tooltag is not None:
|
||||
namestr += '[%s]' % self._tooltag
|
||||
self._configFile.SetProjectName(namestr)
|
||||
self._configFile.SetStripPath(self._pObj.GetWorkspace())
|
||||
|
@ -314,7 +314,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
classPage = doxygen.Page(obj.GetClassName(),
|
||||
"lc_%s" % obj.GetClassName())
|
||||
|
@ -399,7 +399,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
|
||||
filePath = mo.groups()[0]
|
||||
|
||||
if filePath == None or len(filePath) == 0:
|
||||
if filePath is None or len(filePath) == 0:
|
||||
continue
|
||||
|
||||
# find header file in module's path firstly.
|
||||
|
@ -417,7 +417,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if os.path.exists(incPath):
|
||||
fullPath = incPath
|
||||
break
|
||||
if infObj != None:
|
||||
if infObj is not None:
|
||||
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
|
||||
for obj in pkgInfObjs:
|
||||
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
|
||||
|
@ -433,10 +433,10 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if os.path.exists(os.path.join(incPath, filePath)):
|
||||
fullPath = os.path.join(os.path.join(incPath, filePath))
|
||||
break
|
||||
if fullPath != None:
|
||||
if fullPath is not None:
|
||||
break
|
||||
|
||||
if fullPath == None and self.IsVerbose():
|
||||
if fullPath is None and self.IsVerbose():
|
||||
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
|
||||
return
|
||||
else:
|
||||
|
@ -477,7 +477,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
|
||||
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
|
||||
typeRoot = typeRootPageDict[obj.GetPcdType()]
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
|
||||
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
|
||||
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
|
||||
|
@ -573,7 +573,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('GUID', 'guid_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -626,7 +626,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -680,7 +680,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -773,7 +773,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if not infObj.Parse():
|
||||
self.Log('Fail to load INF file %s' % inf)
|
||||
continue
|
||||
if infObj.GetProduceLibraryClass() != None:
|
||||
if infObj.GetProduceLibraryClass() is not None:
|
||||
libObjs.append(infObj)
|
||||
else:
|
||||
modObjs.append(infObj)
|
||||
|
@ -951,7 +951,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
|
||||
workspace,
|
||||
refDecObjs)
|
||||
if retarr != None:
|
||||
if retarr is not None:
|
||||
pkgname, hPath = retarr
|
||||
else:
|
||||
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
|
||||
|
|
|
@ -66,7 +66,7 @@ class DoxygenAction:
|
|||
self._chmCallback = None
|
||||
|
||||
def Log(self, message, level='info'):
|
||||
if self._log != None:
|
||||
if self._log is not None:
|
||||
self._log(message, level)
|
||||
|
||||
def IsVerbose(self):
|
||||
|
@ -91,7 +91,7 @@ class DoxygenAction:
|
|||
|
||||
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
|
||||
indexPagePath = self.GenerateIndexPage()
|
||||
if indexPagePath == None:
|
||||
if indexPagePath is None:
|
||||
self.Log("Fail to generate index page!\n", 'error')
|
||||
return False
|
||||
else:
|
||||
|
@ -106,7 +106,7 @@ class DoxygenAction:
|
|||
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
|
||||
|
||||
# launch doxygen tool to generate document
|
||||
if self._doxygenCallback != None:
|
||||
if self._doxygenCallback is not None:
|
||||
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
|
||||
if not self._doxygenCallback(self._doxPath, configFilePath):
|
||||
return False
|
||||
|
@ -167,9 +167,9 @@ class PackageDocumentAction(DoxygenAction):
|
|||
self._configFile.AddPreDefined(macro)
|
||||
|
||||
namestr = self._pObj.GetName()
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
namestr += '[%s]' % self._arch
|
||||
if self._tooltag != None:
|
||||
if self._tooltag is not None:
|
||||
namestr += '[%s]' % self._tooltag
|
||||
self._configFile.SetProjectName(namestr)
|
||||
self._configFile.SetStripPath(self._pObj.GetWorkspace())
|
||||
|
@ -315,7 +315,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
classPage = doxygen.Page(obj.GetClassName(),
|
||||
"lc_%s" % obj.GetClassName())
|
||||
|
@ -401,7 +401,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
|
||||
filePath = mo.groups()[0]
|
||||
|
||||
if filePath == None or len(filePath) == 0:
|
||||
if filePath is None or len(filePath) == 0:
|
||||
continue
|
||||
|
||||
# find header file in module's path firstly.
|
||||
|
@ -419,7 +419,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if os.path.exists(incPath):
|
||||
fullPath = incPath
|
||||
break
|
||||
if infObj != None:
|
||||
if infObj is not None:
|
||||
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
|
||||
for obj in pkgInfObjs:
|
||||
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
|
||||
|
@ -435,10 +435,10 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if os.path.exists(os.path.join(incPath, filePath)):
|
||||
fullPath = os.path.join(os.path.join(incPath, filePath))
|
||||
break
|
||||
if fullPath != None:
|
||||
if fullPath is not None:
|
||||
break
|
||||
|
||||
if fullPath == None and self.IsVerbose():
|
||||
if fullPath is None and self.IsVerbose():
|
||||
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
|
||||
return
|
||||
else:
|
||||
|
@ -479,7 +479,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
|
||||
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
|
||||
typeRoot = typeRootPageDict[obj.GetPcdType()]
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
|
||||
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
|
||||
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
|
||||
|
@ -575,7 +575,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('GUID', 'guid_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -628,7 +628,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -682,7 +682,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
|
||||
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
|
||||
if len(objs) == 0: return []
|
||||
if self._arch != None:
|
||||
if self._arch is not None:
|
||||
for obj in objs:
|
||||
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
|
||||
else:
|
||||
|
@ -775,7 +775,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
if not infObj.Parse():
|
||||
self.Log('Fail to load INF file %s' % inf)
|
||||
continue
|
||||
if infObj.GetProduceLibraryClass() != None:
|
||||
if infObj.GetProduceLibraryClass() is not None:
|
||||
libObjs.append(infObj)
|
||||
else:
|
||||
modObjs.append(infObj)
|
||||
|
@ -954,7 +954,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
|
||||
workspace,
|
||||
refDecObjs)
|
||||
if retarr != None:
|
||||
if retarr is not None:
|
||||
pkgname, hPath = retarr
|
||||
else:
|
||||
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
|
||||
|
|
|
@ -189,7 +189,7 @@ class DSCComponentObject(DSCSectionObject):
|
|||
lines.append(' <%s>\n' % key)
|
||||
|
||||
for name, value in self._OveridePcds[key]:
|
||||
if value != None:
|
||||
if value is not None:
|
||||
lines.append(' %s|%s\n' % (name, value))
|
||||
else:
|
||||
lines.append(' %s\n' % name)
|
||||
|
|
|
@ -23,7 +23,7 @@ class INFFile(ini.BaseINIFile):
|
|||
|
||||
def GetProduceLibraryClass(self):
|
||||
obj = self.GetDefine("LIBRARY_CLASS")
|
||||
if obj == None: return None
|
||||
if obj is None: return None
|
||||
|
||||
return obj.split('|')[0].strip()
|
||||
|
||||
|
@ -59,7 +59,7 @@ class INFFile(ini.BaseINIFile):
|
|||
if not ini.BaseINIFile.Parse(self):
|
||||
return False
|
||||
classname = self.GetProduceLibraryClass()
|
||||
if classname != None:
|
||||
if classname is not None:
|
||||
libobjdict = INFFile._libobjs
|
||||
if libobjdict.has_key(classname):
|
||||
if self not in libobjdict[classname]:
|
||||
|
@ -77,7 +77,7 @@ class INFFile(ini.BaseINIFile):
|
|||
|
||||
def Clear(self):
|
||||
classname = self.GetProduceLibraryClass()
|
||||
if classname != None:
|
||||
if classname is not None:
|
||||
libobjdict = INFFile._libobjs
|
||||
libobjdict[classname].remove(self)
|
||||
if len(libobjdict[classname]) == 0:
|
||||
|
@ -114,7 +114,7 @@ class INFSection(ini.BaseINISection):
|
|||
return arr[1]
|
||||
|
||||
def IsArchMatch(self, arch):
|
||||
if arch == None or self.GetArch() == 'common':
|
||||
if arch is None or self.GetArch() == 'common':
|
||||
return True
|
||||
|
||||
if self.GetArch().lower() != arch.lower():
|
||||
|
@ -258,9 +258,9 @@ class INFSourceObject(INFSectionObject):
|
|||
del objdict[self.mFilename]
|
||||
|
||||
def IsMatchFamily(self, family):
|
||||
if family == None:
|
||||
if family is None:
|
||||
return True
|
||||
if self.mFamily != None:
|
||||
if self.mFamily is not None:
|
||||
if family.strip().lower() == self.mFamily.lower():
|
||||
return True
|
||||
else:
|
||||
|
|
|
@ -766,7 +766,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||
for Fv in Fdf.Profile.FvDict:
|
||||
_GuidDict = {}
|
||||
for FfsFile in Fdf.Profile.FvDict[Fv].FfsList:
|
||||
if FfsFile.InfFileName and FfsFile.NameGuid == None:
|
||||
if FfsFile.InfFileName and FfsFile.NameGuid is None:
|
||||
#
|
||||
# Get INF file GUID
|
||||
#
|
||||
|
@ -817,7 +817,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||
ExtraData=self.FdfFile)
|
||||
InfFoundFlag = False
|
||||
|
||||
if FfsFile.NameGuid != None:
|
||||
if FfsFile.NameGuid is not None:
|
||||
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
|
||||
|
||||
#
|
||||
|
@ -939,13 +939,13 @@ class WorkspaceAutoGen(AutoGen):
|
|||
|
||||
## Return the directory to store FV files
|
||||
def _GetFvDir(self):
|
||||
if self._FvDir == None:
|
||||
if self._FvDir is None:
|
||||
self._FvDir = path.join(self.BuildDir, 'FV')
|
||||
return self._FvDir
|
||||
|
||||
## Return the directory to store all intermediate and final files built
|
||||
def _GetBuildDir(self):
|
||||
if self._BuildDir == None:
|
||||
if self._BuildDir is None:
|
||||
return self.AutoGenObjectList[0].BuildDir
|
||||
|
||||
## Return the build output directory platform specifies
|
||||
|
@ -973,7 +973,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||
# @retval string Makefile directory
|
||||
#
|
||||
def _GetMakeFileDir(self):
|
||||
if self._MakeFileDir == None:
|
||||
if self._MakeFileDir is None:
|
||||
self._MakeFileDir = self.BuildDir
|
||||
return self._MakeFileDir
|
||||
|
||||
|
@ -982,7 +982,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||
# @retval string Build command string
|
||||
#
|
||||
def _GetBuildCommand(self):
|
||||
if self._BuildCommand == None:
|
||||
if self._BuildCommand is None:
|
||||
# BuildCommand should be all the same. So just get one from platform AutoGen
|
||||
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
|
||||
return self._BuildCommand
|
||||
|
@ -1215,7 +1215,7 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
self.VariableInfo = None
|
||||
|
||||
if GlobalData.gFdfParser != None:
|
||||
if GlobalData.gFdfParser is not None:
|
||||
self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
|
||||
for Inf in self._AsBuildInfList:
|
||||
InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
|
||||
|
@ -1331,7 +1331,7 @@ class PlatformAutoGen(AutoGen):
|
|||
for SkuName in Pcd.SkuInfoList:
|
||||
Sku = Pcd.SkuInfoList[SkuName]
|
||||
SkuId = Sku.SkuId
|
||||
if SkuId == None or SkuId == '':
|
||||
if SkuId is None or SkuId == '':
|
||||
continue
|
||||
if len(Sku.VariableName) > 0:
|
||||
VariableGuidStructure = Sku.VariableGuidValue
|
||||
|
@ -1642,7 +1642,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
|
||||
NeedProcessVpdMapFile = True
|
||||
if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
|
||||
if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
|
||||
EdkLogger.error("Build", FILE_NOT_FOUND, \
|
||||
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
|
||||
|
||||
|
@ -1654,7 +1654,7 @@ class PlatformAutoGen(AutoGen):
|
|||
for DscPcd in PlatformPcds:
|
||||
DscPcdEntry = self._PlatformPcds[DscPcd]
|
||||
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
|
||||
if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
|
||||
if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
|
||||
FoundFlag = False
|
||||
for VpdPcd in VpdFile._VpdArray.keys():
|
||||
# This PCD has been referenced by module
|
||||
|
@ -1734,7 +1734,7 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
|
||||
VpdSkuMap[DscPcd] = SkuValueMap
|
||||
if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
|
||||
if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
|
||||
VpdFile.GetCount() != 0:
|
||||
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
|
||||
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
|
||||
|
@ -1817,14 +1817,14 @@ class PlatformAutoGen(AutoGen):
|
|||
BPDGToolName = ToolDef["PATH"]
|
||||
break
|
||||
# Call third party GUID BPDG tool.
|
||||
if BPDGToolName != None:
|
||||
if BPDGToolName is not None:
|
||||
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
|
||||
else:
|
||||
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
|
||||
|
||||
## Return the platform build data object
|
||||
def _GetPlatform(self):
|
||||
if self._Platform == None:
|
||||
if self._Platform is None:
|
||||
self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
|
||||
return self._Platform
|
||||
|
||||
|
@ -1842,7 +1842,7 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Return the FDF file name
|
||||
def _GetFdfFile(self):
|
||||
if self._FdfFile == None:
|
||||
if self._FdfFile is None:
|
||||
if self.Workspace.FdfFile != "":
|
||||
self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
|
||||
else:
|
||||
|
@ -1855,7 +1855,7 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Return the directory to store all intermediate and final files built
|
||||
def _GetBuildDir(self):
|
||||
if self._BuildDir == None:
|
||||
if self._BuildDir is None:
|
||||
if os.path.isabs(self.OutputDir):
|
||||
self._BuildDir = path.join(
|
||||
path.abspath(self.OutputDir),
|
||||
|
@ -1875,7 +1875,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# @retval string Makefile directory
|
||||
#
|
||||
def _GetMakeFileDir(self):
|
||||
if self._MakeFileDir == None:
|
||||
if self._MakeFileDir is None:
|
||||
self._MakeFileDir = path.join(self.BuildDir, self.Arch)
|
||||
return self._MakeFileDir
|
||||
|
||||
|
@ -1884,7 +1884,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# @retval string Build command string
|
||||
#
|
||||
def _GetBuildCommand(self):
|
||||
if self._BuildCommand == None:
|
||||
if self._BuildCommand is None:
|
||||
self._BuildCommand = []
|
||||
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
|
||||
self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])
|
||||
|
@ -1906,7 +1906,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# Get each tool defition for given tool chain from tools_def.txt and platform
|
||||
#
|
||||
def _GetToolDefinition(self):
|
||||
if self._ToolDefinitions == None:
|
||||
if self._ToolDefinitions is None:
|
||||
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
|
||||
if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
|
||||
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
|
||||
|
@ -1972,13 +1972,13 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Return the paths of tools
|
||||
def _GetToolDefFile(self):
|
||||
if self._ToolDefFile == None:
|
||||
if self._ToolDefFile is None:
|
||||
self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
|
||||
return self._ToolDefFile
|
||||
|
||||
## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
|
||||
def _GetToolChainFamily(self):
|
||||
if self._ToolChainFamily == None:
|
||||
if self._ToolChainFamily is None:
|
||||
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
|
||||
if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
|
||||
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
|
||||
|
@ -1991,7 +1991,7 @@ class PlatformAutoGen(AutoGen):
|
|||
return self._ToolChainFamily
|
||||
|
||||
def _GetBuildRuleFamily(self):
|
||||
if self._BuildRuleFamily == None:
|
||||
if self._BuildRuleFamily is None:
|
||||
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
|
||||
if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
|
||||
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
|
||||
|
@ -2005,19 +2005,19 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Return the build options specific for all modules in this platform
|
||||
def _GetBuildOptions(self):
|
||||
if self._BuildOption == None:
|
||||
if self._BuildOption is None:
|
||||
self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
|
||||
return self._BuildOption
|
||||
|
||||
## Return the build options specific for EDK modules in this platform
|
||||
def _GetEdkBuildOptions(self):
|
||||
if self._EdkBuildOption == None:
|
||||
if self._EdkBuildOption is None:
|
||||
self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
|
||||
return self._EdkBuildOption
|
||||
|
||||
## Return the build options specific for EDKII modules in this platform
|
||||
def _GetEdkIIBuildOptions(self):
|
||||
if self._EdkIIBuildOption == None:
|
||||
if self._EdkIIBuildOption is None:
|
||||
self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
|
||||
return self._EdkIIBuildOption
|
||||
|
||||
|
@ -2026,7 +2026,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# @retval BuildRule object
|
||||
#
|
||||
def _GetBuildRule(self):
|
||||
if self._BuildRule == None:
|
||||
if self._BuildRule is None:
|
||||
BuildRuleFile = None
|
||||
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
|
||||
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
|
||||
|
@ -2046,7 +2046,7 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Summarize the packages used by modules in this platform
|
||||
def _GetPackageList(self):
|
||||
if self._PackageList == None:
|
||||
if self._PackageList is None:
|
||||
self._PackageList = set()
|
||||
for La in self.LibraryAutoGenList:
|
||||
self._PackageList.update(La.DependentPackageList)
|
||||
|
@ -2071,19 +2071,19 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Get list of non-dynamic PCDs
|
||||
def _GetNonDynamicPcdList(self):
|
||||
if self._NonDynamicPcdList == None:
|
||||
if self._NonDynamicPcdList is None:
|
||||
self.CollectPlatformDynamicPcds()
|
||||
return self._NonDynamicPcdList
|
||||
|
||||
## Get list of dynamic PCDs
|
||||
def _GetDynamicPcdList(self):
|
||||
if self._DynamicPcdList == None:
|
||||
if self._DynamicPcdList is None:
|
||||
self.CollectPlatformDynamicPcds()
|
||||
return self._DynamicPcdList
|
||||
|
||||
## Generate Token Number for all PCD
|
||||
def _GetPcdTokenNumbers(self):
|
||||
if self._PcdTokenNumber == None:
|
||||
if self._PcdTokenNumber is None:
|
||||
self._PcdTokenNumber = sdict()
|
||||
TokenNumber = 1
|
||||
#
|
||||
|
@ -2151,13 +2151,13 @@ class PlatformAutoGen(AutoGen):
|
|||
|
||||
## Summarize ModuleAutoGen objects of all modules to be built for this platform
|
||||
def _GetModuleAutoGenList(self):
|
||||
if self._ModuleAutoGenList == None:
|
||||
if self._ModuleAutoGenList is None:
|
||||
self._GetAutoGenObjectList()
|
||||
return self._ModuleAutoGenList
|
||||
|
||||
## Summarize ModuleAutoGen objects of all libraries to be built for this platform
|
||||
def _GetLibraryAutoGenList(self):
|
||||
if self._LibraryAutoGenList == None:
|
||||
if self._LibraryAutoGenList is None:
|
||||
self._GetAutoGenObjectList()
|
||||
return self._LibraryAutoGenList
|
||||
|
||||
|
@ -2221,9 +2221,9 @@ class PlatformAutoGen(AutoGen):
|
|||
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
|
||||
else:
|
||||
LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]
|
||||
if LibraryPath == None or LibraryPath == "":
|
||||
if LibraryPath is None or LibraryPath == "":
|
||||
LibraryPath = M.LibraryClasses[LibraryClassName]
|
||||
if LibraryPath == None or LibraryPath == "":
|
||||
if LibraryPath is None or LibraryPath == "":
|
||||
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
|
||||
"Instance of library class [%s] is not found" % LibraryClassName,
|
||||
File=self.MetaFile,
|
||||
|
@ -2233,7 +2233,7 @@ class PlatformAutoGen(AutoGen):
|
|||
# for those forced library instance (NULL library), add a fake library class
|
||||
if LibraryClassName.startswith("NULL"):
|
||||
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
|
||||
elif LibraryModule.LibraryClass == None \
|
||||
elif LibraryModule.LibraryClass is None \
|
||||
or len(LibraryModule.LibraryClass) == 0 \
|
||||
or (ModuleType != 'USER_DEFINED'
|
||||
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
|
||||
|
@ -2249,7 +2249,7 @@ class PlatformAutoGen(AutoGen):
|
|||
else:
|
||||
LibraryModule = LibraryInstance[LibraryClassName]
|
||||
|
||||
if LibraryModule == None:
|
||||
if LibraryModule is None:
|
||||
continue
|
||||
|
||||
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
|
||||
|
@ -2357,7 +2357,7 @@ class PlatformAutoGen(AutoGen):
|
|||
if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
|
||||
TokenCName = PcdItem[0]
|
||||
break
|
||||
if FromPcd != None:
|
||||
if FromPcd is not None:
|
||||
if ToPcd.Pending and FromPcd.Type not in [None, '']:
|
||||
ToPcd.Type = FromPcd.Type
|
||||
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
|
||||
|
@ -2401,7 +2401,7 @@ class PlatformAutoGen(AutoGen):
|
|||
ToPcd.validlists = FromPcd.validlists
|
||||
ToPcd.expressions = FromPcd.expressions
|
||||
|
||||
if FromPcd != None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
|
||||
if FromPcd is not None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
|
||||
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
|
||||
% (ToPcd.TokenSpaceGuidCName, TokenCName))
|
||||
Value = ToPcd.DefaultValue
|
||||
|
@ -2447,7 +2447,7 @@ class PlatformAutoGen(AutoGen):
|
|||
Sku = PcdInModule.SkuInfoList[SkuId]
|
||||
if Sku.VariableGuid == '': continue
|
||||
Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
|
||||
if Sku.VariableGuidValue == None:
|
||||
if Sku.VariableGuidValue is None:
|
||||
PackageList = "\n\t".join([str(P) for P in self.PackageList])
|
||||
EdkLogger.error(
|
||||
'build',
|
||||
|
@ -2510,12 +2510,12 @@ class PlatformAutoGen(AutoGen):
|
|||
M = LibraryConsumerList.pop()
|
||||
for LibraryName in M.Libraries:
|
||||
Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
|
||||
if Library == None:
|
||||
if Library is None:
|
||||
for Key in self.Platform.LibraryClasses.data.keys():
|
||||
if LibraryName.upper() == Key.upper():
|
||||
Library = self.Platform.LibraryClasses[Key, ':dummy:']
|
||||
break
|
||||
if Library == None:
|
||||
if Library is None:
|
||||
EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
|
||||
ExtraData="\t%s [%s]" % (str(Module), self.Arch))
|
||||
continue
|
||||
|
@ -2570,13 +2570,13 @@ class PlatformAutoGen(AutoGen):
|
|||
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
|
||||
#
|
||||
if (Key[0] == self.BuildRuleFamily and
|
||||
(ModuleStyle == None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
|
||||
(ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
|
||||
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
|
||||
if Target == self.BuildTarget or Target == "*":
|
||||
if ToolChain == self.ToolChain or ToolChain == "*":
|
||||
if Arch == self.Arch or Arch == "*":
|
||||
if Options[Key].startswith("="):
|
||||
if OverrideList.get(Key[1]) != None:
|
||||
if OverrideList.get(Key[1]) is not None:
|
||||
OverrideList.pop(Key[1])
|
||||
OverrideList[Key[1]] = Options[Key]
|
||||
|
||||
|
@ -2600,14 +2600,14 @@ class PlatformAutoGen(AutoGen):
|
|||
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
|
||||
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
|
||||
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
|
||||
if Options.get((self.BuildRuleFamily, NextKey)) != None:
|
||||
if Options.get((self.BuildRuleFamily, NextKey)) is not None:
|
||||
Options.pop((self.BuildRuleFamily, NextKey))
|
||||
else:
|
||||
if Options.get((self.BuildRuleFamily, NowKey)) != None:
|
||||
if Options.get((self.BuildRuleFamily, NowKey)) is not None:
|
||||
Options.pop((self.BuildRuleFamily, NowKey))
|
||||
|
||||
for Key in Options:
|
||||
if ModuleStyle != None and len (Key) > 2:
|
||||
if ModuleStyle is not None and len (Key) > 2:
|
||||
# Check Module style is EDK or EDKII.
|
||||
# Only append build option for the matched style module.
|
||||
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
|
||||
|
@ -2644,7 +2644,7 @@ class PlatformAutoGen(AutoGen):
|
|||
return BuildOptions
|
||||
|
||||
for Key in Options:
|
||||
if ModuleStyle != None and len (Key) > 2:
|
||||
if ModuleStyle is not None and len (Key) > 2:
|
||||
# Check Module style is EDK or EDKII.
|
||||
# Only append build option for the matched style module.
|
||||
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
|
||||
|
@ -2736,7 +2736,7 @@ class PlatformAutoGen(AutoGen):
|
|||
BuildOptions[Tool][Attr] += " " + Value
|
||||
else:
|
||||
BuildOptions[Tool][Attr] = Value
|
||||
if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
|
||||
if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:
|
||||
#
|
||||
# Override UNI flag only for EDK module.
|
||||
#
|
||||
|
@ -2942,7 +2942,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
# Macros could be used in build_rule.txt (also Makefile)
|
||||
def _GetMacros(self):
|
||||
if self._Macro == None:
|
||||
if self._Macro is None:
|
||||
self._Macro = sdict()
|
||||
self._Macro["WORKSPACE" ] = self.WorkspaceDir
|
||||
self._Macro["MODULE_NAME" ] = self.Name
|
||||
|
@ -2982,7 +2982,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the module build data object
|
||||
def _GetModule(self):
|
||||
if self._Module == None:
|
||||
if self._Module is None:
|
||||
self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
|
||||
return self._Module
|
||||
|
||||
|
@ -3038,8 +3038,8 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Check if the module is library or not
|
||||
def _IsLibrary(self):
|
||||
if self._LibraryFlag == None:
|
||||
if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
|
||||
if self._LibraryFlag is None:
|
||||
if self.Module.LibraryClass is not None and self.Module.LibraryClass != []:
|
||||
self._LibraryFlag = True
|
||||
else:
|
||||
self._LibraryFlag = False
|
||||
|
@ -3051,7 +3051,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the directory to store intermediate files of the module
|
||||
def _GetBuildDir(self):
|
||||
if self._BuildDir == None:
|
||||
if self._BuildDir is None:
|
||||
self._BuildDir = path.join(
|
||||
self.PlatformInfo.BuildDir,
|
||||
self.Arch,
|
||||
|
@ -3063,15 +3063,15 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the directory to store the intermediate object files of the mdoule
|
||||
def _GetOutputDir(self):
|
||||
if self._OutputDir == None:
|
||||
if self._OutputDir is None:
|
||||
self._OutputDir = path.join(self.BuildDir, "OUTPUT")
|
||||
CreateDirectory(self._OutputDir)
|
||||
return self._OutputDir
|
||||
|
||||
## Return the directory to store ffs file
|
||||
def _GetFfsOutputDir(self):
|
||||
if self._FfsOutputDir == None:
|
||||
if GlobalData.gFdfParser != None:
|
||||
if self._FfsOutputDir is None:
|
||||
if GlobalData.gFdfParser is not None:
|
||||
self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name)
|
||||
else:
|
||||
self._FfsOutputDir = ''
|
||||
|
@ -3079,21 +3079,21 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the directory to store auto-gened source files of the mdoule
|
||||
def _GetDebugDir(self):
|
||||
if self._DebugDir == None:
|
||||
if self._DebugDir is None:
|
||||
self._DebugDir = path.join(self.BuildDir, "DEBUG")
|
||||
CreateDirectory(self._DebugDir)
|
||||
return self._DebugDir
|
||||
|
||||
## Return the path of custom file
|
||||
def _GetCustomMakefile(self):
|
||||
if self._CustomMakefile == None:
|
||||
if self._CustomMakefile is None:
|
||||
self._CustomMakefile = {}
|
||||
for Type in self.Module.CustomMakefile:
|
||||
if Type in gMakeTypeMap:
|
||||
MakeType = gMakeTypeMap[Type]
|
||||
else:
|
||||
MakeType = 'nmake'
|
||||
if self.SourceOverrideDir != None:
|
||||
if self.SourceOverrideDir is not None:
|
||||
File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
|
||||
if not os.path.exists(File):
|
||||
File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
|
||||
|
@ -3194,7 +3194,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The token list of the dependency expression after parsed
|
||||
#
|
||||
def _GetDepexTokenList(self):
|
||||
if self._DepexList == None:
|
||||
if self._DepexList is None:
|
||||
self._DepexList = {}
|
||||
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
|
||||
return self._DepexList
|
||||
|
@ -3230,7 +3230,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The token list of the dependency expression after parsed
|
||||
#
|
||||
def _GetDepexExpressionTokenList(self):
|
||||
if self._DepexExpressionList == None:
|
||||
if self._DepexExpressionList is None:
|
||||
self._DepexExpressionList = {}
|
||||
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
|
||||
return self._DepexExpressionList
|
||||
|
@ -3298,7 +3298,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval dict The dict containing valid options
|
||||
#
|
||||
def _GetModuleBuildOption(self):
|
||||
if self._BuildOption == None:
|
||||
if self._BuildOption is None:
|
||||
self._BuildOption, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
|
||||
if self.BuildRuleOrder:
|
||||
self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
|
||||
|
@ -3309,7 +3309,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The include path list
|
||||
#
|
||||
def _GetBuildOptionIncPathList(self):
|
||||
if self._BuildOptionIncPathList == None:
|
||||
if self._BuildOptionIncPathList is None:
|
||||
#
|
||||
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
|
||||
# is the former use /I , the Latter used -I to specify include directories
|
||||
|
@ -3370,7 +3370,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
|
||||
#
|
||||
def _GetSourceFileList(self):
|
||||
if self._SourceFileList == None:
|
||||
if self._SourceFileList is None:
|
||||
self._SourceFileList = []
|
||||
for F in self.Module.Sources:
|
||||
# match tool chain
|
||||
|
@ -3423,7 +3423,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the list of unicode files
|
||||
def _GetUnicodeFileList(self):
|
||||
if self._UnicodeFileList == None:
|
||||
if self._UnicodeFileList is None:
|
||||
if TAB_UNICODE_FILE in self.FileTypes:
|
||||
self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
|
||||
else:
|
||||
|
@ -3432,7 +3432,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the list of vfr files
|
||||
def _GetVfrFileList(self):
|
||||
if self._VfrFileList == None:
|
||||
if self._VfrFileList is None:
|
||||
if TAB_VFR_FILE in self.FileTypes:
|
||||
self._VfrFileList = self.FileTypes[TAB_VFR_FILE]
|
||||
else:
|
||||
|
@ -3441,7 +3441,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Return the list of Image Definition files
|
||||
def _GetIdfFileList(self):
|
||||
if self._IdfFileList == None:
|
||||
if self._IdfFileList is None:
|
||||
if TAB_IMAGE_FILE in self.FileTypes:
|
||||
self._IdfFileList = self.FileTypes[TAB_IMAGE_FILE]
|
||||
else:
|
||||
|
@ -3455,7 +3455,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The list of files which can be built later
|
||||
#
|
||||
def _GetBinaryFiles(self):
|
||||
if self._BinaryFileList == None:
|
||||
if self._BinaryFileList is None:
|
||||
self._BinaryFileList = []
|
||||
for F in self.Module.Binaries:
|
||||
if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
|
||||
|
@ -3465,7 +3465,7 @@ class ModuleAutoGen(AutoGen):
|
|||
return self._BinaryFileList
|
||||
|
||||
def _GetBuildRules(self):
|
||||
if self._BuildRules == None:
|
||||
if self._BuildRules is None:
|
||||
BuildRules = {}
|
||||
BuildRuleDatabase = self.PlatformInfo.BuildRule
|
||||
for Type in BuildRuleDatabase.FileTypeList:
|
||||
|
@ -3492,7 +3492,7 @@ class ModuleAutoGen(AutoGen):
|
|||
return self._BuildRules
|
||||
|
||||
def _ApplyBuildRule(self, File, FileType):
|
||||
if self._BuildTargets == None:
|
||||
if self._BuildTargets is None:
|
||||
self._IntroBuildTargetList = set()
|
||||
self._FinalBuildTargetList = set()
|
||||
self._BuildTargets = {}
|
||||
|
@ -3517,7 +3517,7 @@ class ModuleAutoGen(AutoGen):
|
|||
if Source != File:
|
||||
CreateDirectory(Source.Dir)
|
||||
|
||||
if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
|
||||
if File.IsBinary and File == Source and self._BinaryFileList is not None and File in self._BinaryFileList:
|
||||
# Skip all files that are not binary libraries
|
||||
if not self.IsLibrary:
|
||||
continue
|
||||
|
@ -3569,7 +3569,7 @@ class ModuleAutoGen(AutoGen):
|
|||
FileType = TAB_UNKNOWN_FILE
|
||||
|
||||
def _GetTargets(self):
|
||||
if self._BuildTargets == None:
|
||||
if self._BuildTargets is None:
|
||||
self._IntroBuildTargetList = set()
|
||||
self._FinalBuildTargetList = set()
|
||||
self._BuildTargets = {}
|
||||
|
@ -3616,7 +3616,7 @@ class ModuleAutoGen(AutoGen):
|
|||
if self.BuildType == 'UEFI_HII':
|
||||
UniStringAutoGenC = False
|
||||
IdfStringAutoGenC = False
|
||||
if self._AutoGenFileList == None:
|
||||
if self._AutoGenFileList is None:
|
||||
self._AutoGenFileList = {}
|
||||
AutoGenC = TemplateString()
|
||||
AutoGenH = TemplateString()
|
||||
|
@ -3639,29 +3639,29 @@ class ModuleAutoGen(AutoGen):
|
|||
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
|
||||
self._AutoGenFileList[AutoFile] = str(StringH)
|
||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||
if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":
|
||||
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
|
||||
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
|
||||
self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()
|
||||
AutoFile.IsBinary = True
|
||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||
if UniStringBinBuffer != None:
|
||||
if UniStringBinBuffer is not None:
|
||||
UniStringBinBuffer.close()
|
||||
if str(StringIdf) != "":
|
||||
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
|
||||
self._AutoGenFileList[AutoFile] = str(StringIdf)
|
||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||
if IdfGenBinBuffer != None and IdfGenBinBuffer.getvalue() != "":
|
||||
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
|
||||
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
|
||||
self._AutoGenFileList[AutoFile] = IdfGenBinBuffer.getvalue()
|
||||
AutoFile.IsBinary = True
|
||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||
if IdfGenBinBuffer != None:
|
||||
if IdfGenBinBuffer is not None:
|
||||
IdfGenBinBuffer.close()
|
||||
return self._AutoGenFileList
|
||||
|
||||
## Return the list of library modules explicitly or implicityly used by this module
|
||||
def _GetLibraryList(self):
|
||||
if self._DependentLibraryList == None:
|
||||
if self._DependentLibraryList is None:
|
||||
# only merge library classes and PCD for non-library module
|
||||
if self.IsLibrary:
|
||||
self._DependentLibraryList = []
|
||||
|
@ -3683,7 +3683,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The list of PCD
|
||||
#
|
||||
def _GetModulePcdList(self):
|
||||
if self._ModulePcdList == None:
|
||||
if self._ModulePcdList is None:
|
||||
# apply PCD settings from platform
|
||||
self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
|
||||
self.UpdateComments(self._PcdComments, self.Module.PcdComments)
|
||||
|
@ -3694,7 +3694,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The list of PCD
|
||||
#
|
||||
def _GetLibraryPcdList(self):
|
||||
if self._LibraryPcdList == None:
|
||||
if self._LibraryPcdList is None:
|
||||
Pcds = sdict()
|
||||
if not self.IsLibrary:
|
||||
# get PCDs from dependent libraries
|
||||
|
@ -3716,7 +3716,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval dict The mapping between GUID cname and its value
|
||||
#
|
||||
def _GetGuidList(self):
|
||||
if self._GuidList == None:
|
||||
if self._GuidList is None:
|
||||
self._GuidList = sdict()
|
||||
self._GuidList.update(self.Module.Guids)
|
||||
for Library in self.DependentLibraryList:
|
||||
|
@ -3726,7 +3726,7 @@ class ModuleAutoGen(AutoGen):
|
|||
return self._GuidList
|
||||
|
||||
def GetGuidsUsedByPcd(self):
|
||||
if self._GuidsUsedByPcd == None:
|
||||
if self._GuidsUsedByPcd is None:
|
||||
self._GuidsUsedByPcd = sdict()
|
||||
self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())
|
||||
for Library in self.DependentLibraryList:
|
||||
|
@ -3737,7 +3737,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval dict The mapping between protocol cname and its value
|
||||
#
|
||||
def _GetProtocolList(self):
|
||||
if self._ProtocolList == None:
|
||||
if self._ProtocolList is None:
|
||||
self._ProtocolList = sdict()
|
||||
self._ProtocolList.update(self.Module.Protocols)
|
||||
for Library in self.DependentLibraryList:
|
||||
|
@ -3751,7 +3751,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval dict The mapping between PPI cname and its value
|
||||
#
|
||||
def _GetPpiList(self):
|
||||
if self._PpiList == None:
|
||||
if self._PpiList is None:
|
||||
self._PpiList = sdict()
|
||||
self._PpiList.update(self.Module.Ppis)
|
||||
for Library in self.DependentLibraryList:
|
||||
|
@ -3765,7 +3765,7 @@ class ModuleAutoGen(AutoGen):
|
|||
# @retval list The list path
|
||||
#
|
||||
def _GetIncludePathList(self):
|
||||
if self._IncludePathList == None:
|
||||
if self._IncludePathList is None:
|
||||
self._IncludePathList = []
|
||||
if self.AutoGenVersion < 0x00010005:
|
||||
for Inc in self.Module.Includes:
|
||||
|
@ -3957,7 +3957,7 @@ class ModuleAutoGen(AutoGen):
|
|||
return
|
||||
|
||||
# Skip the following code for modules with no source files
|
||||
if self.SourceFileList == None or self.SourceFileList == []:
|
||||
if self.SourceFileList is None or self.SourceFileList == []:
|
||||
return
|
||||
|
||||
# Skip the following code for modules without any binary files
|
||||
|
@ -4172,7 +4172,7 @@ class ModuleAutoGen(AutoGen):
|
|||
HexFormat = '0x%016x'
|
||||
PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
|
||||
else:
|
||||
if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
|
||||
if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
|
||||
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
|
||||
)
|
||||
|
@ -4452,7 +4452,7 @@ class ModuleAutoGen(AutoGen):
|
|||
|
||||
## Summarize the ModuleAutoGen objects of all libraries used by this module
|
||||
def _GetLibraryAutoGenList(self):
|
||||
if self._LibraryAutoGenList == None:
|
||||
if self._LibraryAutoGenList is None:
|
||||
self._LibraryAutoGenList = []
|
||||
for Library in self.DependentLibraryList:
|
||||
La = ModuleAutoGen(
|
||||
|
@ -4540,7 +4540,7 @@ class ModuleAutoGen(AutoGen):
|
|||
return True
|
||||
|
||||
def GetTimeStampPath(self):
|
||||
if self._TimeStampPath == None:
|
||||
if self._TimeStampPath is None:
|
||||
self._TimeStampPath = os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
|
||||
return self._TimeStampPath
|
||||
def CreateTimeStamp(self, Makefile):
|
||||
|
|
|
@ -346,12 +346,12 @@ class BuildRule:
|
|||
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):
|
||||
self.RuleFile = File
|
||||
# Read build rules from file if it's not none
|
||||
if File != None:
|
||||
if File is not None:
|
||||
try:
|
||||
self.RuleContent = open(File, 'r').readlines()
|
||||
except:
|
||||
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
|
||||
elif Content != None:
|
||||
elif Content is not None:
|
||||
self.RuleContent = Content
|
||||
else:
|
||||
EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
|
||||
|
@ -478,7 +478,7 @@ class BuildRule:
|
|||
EdkLogger.error("build", FORMAT_INVALID, "No file type given",
|
||||
File=self.RuleFile, Line=LineIndex + 1,
|
||||
ExtraData=self.RuleContent[LineIndex])
|
||||
if self._FileTypePattern.match(FileType) == None:
|
||||
if self._FileTypePattern.match(FileType) is None:
|
||||
EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
|
||||
ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
|
||||
# new format: File-Type.Build-Type.Arch
|
||||
|
@ -561,7 +561,7 @@ class BuildRule:
|
|||
FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
|
||||
for ToolChainFamily in self._FamilyList:
|
||||
InputFiles = self._RuleInfo[ToolChainFamily, self._State]
|
||||
if InputFiles == None:
|
||||
if InputFiles is None:
|
||||
InputFiles = []
|
||||
self._RuleInfo[ToolChainFamily, self._State] = InputFiles
|
||||
InputFiles.extend(FileList)
|
||||
|
@ -573,7 +573,7 @@ class BuildRule:
|
|||
def ParseCommon(self, LineIndex):
|
||||
for ToolChainFamily in self._FamilyList:
|
||||
Items = self._RuleInfo[ToolChainFamily, self._State]
|
||||
if Items == None:
|
||||
if Items is None:
|
||||
Items = []
|
||||
self._RuleInfo[ToolChainFamily, self._State] = Items
|
||||
Items.append(self.RuleContent[LineIndex])
|
||||
|
|
|
@ -1085,7 +1085,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||
if not Value.endswith('U'):
|
||||
Value += 'U'
|
||||
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']:
|
||||
if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
|
||||
if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
|
||||
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
|
||||
ExtraData="[%s]" % str(Info))
|
||||
|
@ -1122,7 +1122,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||
|
||||
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN', 'VOID*']:
|
||||
# handle structure PCD
|
||||
if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
|
||||
if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
|
||||
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
|
||||
ExtraData="[%s]" % str(Info))
|
||||
|
|
|
@ -360,7 +360,7 @@ class DependencyExpression:
|
|||
|
||||
FilePath = ""
|
||||
FileChangeFlag = True
|
||||
if File == None:
|
||||
if File is None:
|
||||
sys.stdout.write(Buffer.getvalue())
|
||||
FilePath = "STDOUT"
|
||||
else:
|
||||
|
@ -414,13 +414,13 @@ def Main():
|
|||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
elif Option.verbose:
|
||||
EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||
elif Option.debug != None:
|
||||
elif Option.debug is not None:
|
||||
EdkLogger.SetLevel(Option.debug + 1)
|
||||
else:
|
||||
EdkLogger.SetLevel(EdkLogger.INFO)
|
||||
|
||||
try:
|
||||
if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
|
||||
if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
|
||||
EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
|
||||
|
||||
DxsFile = ''
|
||||
|
@ -437,7 +437,7 @@ def Main():
|
|||
EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
|
||||
|
||||
Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
|
||||
if Option.OutputFile != None:
|
||||
if Option.OutputFile is not None:
|
||||
FileChangeFlag = Dpx.Generate(Option.OutputFile)
|
||||
if not FileChangeFlag and DxsFile:
|
||||
#
|
||||
|
@ -450,7 +450,7 @@ def Main():
|
|||
Dpx.Generate()
|
||||
except BaseException, X:
|
||||
EdkLogger.quiet("")
|
||||
if Option != None and Option.debug != None:
|
||||
if Option is not None and Option.debug is not None:
|
||||
EdkLogger.quiet(traceback.format_exc())
|
||||
else:
|
||||
EdkLogger.quiet(str(X))
|
||||
|
|
|
@ -906,12 +906,12 @@ cleanlib:
|
|||
# skip non-C files
|
||||
if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":
|
||||
continue
|
||||
elif DepSet == None:
|
||||
elif DepSet is None:
|
||||
DepSet = set(self.FileDependency[File])
|
||||
else:
|
||||
DepSet &= set(self.FileDependency[File])
|
||||
# in case nothing in SourceFileList
|
||||
if DepSet == None:
|
||||
if DepSet is None:
|
||||
DepSet = set()
|
||||
#
|
||||
# Extract common files list in the dependency files
|
||||
|
@ -1516,7 +1516,7 @@ class TopLevelMakefile(BuildFile):
|
|||
|
||||
# TRICK: for not generating GenFds call in makefile if no FDF file
|
||||
MacroList = []
|
||||
if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":
|
||||
if PlatformInfo.FdfFile is not None and PlatformInfo.FdfFile != "":
|
||||
FdfFileList = [PlatformInfo.FdfFile]
|
||||
# macros passed to GenFds
|
||||
MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource.replace('\\', '\\\\')))
|
||||
|
|
|
@ -1234,7 +1234,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
|
|||
for SkuName in Pcd.SkuInfoList:
|
||||
Sku = Pcd.SkuInfoList[SkuName]
|
||||
SkuId = Sku.SkuId
|
||||
if SkuId == None or SkuId == '':
|
||||
if SkuId is None or SkuId == '':
|
||||
continue
|
||||
|
||||
|
||||
|
|
|
@ -76,7 +76,7 @@ class IdfFileClassObject(object):
|
|||
self.LoadIdfFile(File)
|
||||
|
||||
def LoadIdfFile(self, File = None):
|
||||
if File == None:
|
||||
if File is None:
|
||||
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
|
||||
self.File = File
|
||||
|
||||
|
@ -106,7 +106,7 @@ class IdfFileClassObject(object):
|
|||
if Len == 4 and LineDetails[2] != 'TRANSPARENT':
|
||||
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
|
||||
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', LineDetails[1], re.UNICODE)
|
||||
if MatchString == None or MatchString.end(0) != len(LineDetails[1]):
|
||||
if MatchString is None or MatchString.end(0) != len(LineDetails[1]):
|
||||
EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains the invalid character.' % (LineDetails[1], File.Path))
|
||||
if LineDetails[1] not in self.ImageIDList:
|
||||
self.ImageIDList.append(LineDetails[1])
|
||||
|
|
|
@ -150,7 +150,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
|
|||
Name = StringItem.StringName
|
||||
Token = StringItem.Token
|
||||
Referenced = StringItem.Referenced
|
||||
if Name != None:
|
||||
if Name is not None:
|
||||
Line = ''
|
||||
if Referenced == True:
|
||||
if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
|
||||
|
@ -478,11 +478,11 @@ def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
|
|||
# @retval FileList: A list of all files found
|
||||
#
|
||||
def GetFileList(SourceFileList, IncludeList, SkipList):
|
||||
if IncludeList == None:
|
||||
if IncludeList is None:
|
||||
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
|
||||
|
||||
FileList = []
|
||||
if SkipList == None:
|
||||
if SkipList is None:
|
||||
SkipList = []
|
||||
|
||||
for File in SourceFileList:
|
||||
|
|
|
@ -124,7 +124,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
if IsCompatibleMode:
|
||||
if length == 3 and LangName.isalpha():
|
||||
TempLangName = LangConvTable.get(LangName.lower())
|
||||
if TempLangName != None:
|
||||
if TempLangName is not None:
|
||||
return TempLangName
|
||||
return LangName
|
||||
else:
|
||||
|
@ -136,7 +136,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
if LangName.isalpha():
|
||||
return LangName
|
||||
elif length == 3:
|
||||
if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
|
||||
if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
|
||||
return LangName
|
||||
elif length == 5:
|
||||
if LangName[0:2].isalpha() and LangName[2] == '-':
|
||||
|
@ -144,7 +144,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
elif length >= 6:
|
||||
if LangName[0:2].isalpha() and LangName[2] == '-':
|
||||
return LangName
|
||||
if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
|
||||
if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
|
||||
return LangName
|
||||
|
||||
EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
|
||||
|
@ -195,14 +195,14 @@ class StringDefClassObject(object):
|
|||
self.UseOtherLangDef = UseOtherLangDef
|
||||
self.Length = 0
|
||||
|
||||
if Name != None:
|
||||
if Name is not None:
|
||||
self.StringName = Name
|
||||
self.StringNameByteList = UniToHexList(Name)
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
self.StringValue = Value + u'\x00' # Add a NULL at string tail
|
||||
self.StringValueByteList = UniToHexList(self.StringValue)
|
||||
self.Length = len(self.StringValueByteList)
|
||||
if Token != None:
|
||||
if Token is not None:
|
||||
self.Token = Token
|
||||
|
||||
def __str__(self):
|
||||
|
@ -213,7 +213,7 @@ class StringDefClassObject(object):
|
|||
repr(self.UseOtherLangDef)
|
||||
|
||||
def UpdateValue(self, Value = None):
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
self.StringValue = Value + u'\x00' # Add a NULL at string tail
|
||||
self.StringValueByteList = UniToHexList(self.StringValue)
|
||||
self.Length = len(self.StringValueByteList)
|
||||
|
@ -352,7 +352,7 @@ class UniFileClassObject(object):
|
|||
# Check the string name
|
||||
if Name != '':
|
||||
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
|
||||
if MatchString == None or MatchString.end(0) != len(Name):
|
||||
if MatchString is None or MatchString.end(0) != len(Name):
|
||||
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
|
||||
LanguageList = Item.split(u'#language ')
|
||||
for IndexI in range(len(LanguageList)):
|
||||
|
@ -466,7 +466,7 @@ class UniFileClassObject(object):
|
|||
# Load a .uni file
|
||||
#
|
||||
def LoadUniFile(self, File = None):
|
||||
if File == None:
|
||||
if File is None:
|
||||
EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
|
||||
self.File = File
|
||||
#
|
||||
|
@ -522,7 +522,7 @@ class UniFileClassObject(object):
|
|||
# Check the string name
|
||||
if not self.IsCompatibleMode and Name != '':
|
||||
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
|
||||
if MatchString == None or MatchString.end(0) != len(Name):
|
||||
if MatchString is None or MatchString.end(0) != len(Name):
|
||||
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
|
||||
self.AddStringToList(Name, Language, Value)
|
||||
continue
|
||||
|
@ -578,7 +578,7 @@ class UniFileClassObject(object):
|
|||
IsAdded = True
|
||||
if Name in self.OrderedStringDict[Language]:
|
||||
IsAdded = False
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
ItemIndexInList = self.OrderedStringDict[Language][Name]
|
||||
Item = self.OrderedStringList[Language][ItemIndexInList]
|
||||
Item.UpdateValue(Value)
|
||||
|
|
|
@ -57,21 +57,21 @@ def main():
|
|||
EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||
elif Options.opt_quiet:
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
elif Options.debug_level != None:
|
||||
elif Options.debug_level is not None:
|
||||
EdkLogger.SetLevel(Options.debug_level + 1)
|
||||
else:
|
||||
EdkLogger.SetLevel(EdkLogger.INFO)
|
||||
|
||||
if Options.bin_filename == None:
|
||||
if Options.bin_filename is None:
|
||||
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
|
||||
if Options.filename == None:
|
||||
if Options.filename is None:
|
||||
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
|
||||
|
||||
Force = False
|
||||
if Options.opt_force != None:
|
||||
if Options.opt_force is not None:
|
||||
Force = True
|
||||
|
||||
if (Args[0] != None) :
|
||||
if (Args[0] is not None) :
|
||||
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
|
||||
else :
|
||||
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
|
||||
|
|
|
@ -381,7 +381,7 @@ class GenVPD :
|
|||
# Delete useless lines
|
||||
while (True) :
|
||||
try :
|
||||
if (self.FileLinesList[count] == None) :
|
||||
if (self.FileLinesList[count] is None) :
|
||||
del(self.FileLinesList[count])
|
||||
else :
|
||||
count += 1
|
||||
|
@ -398,7 +398,7 @@ class GenVPD :
|
|||
# Process the pcds one by one base on the pcd's value and size
|
||||
count = 0
|
||||
for line in self.FileLinesList:
|
||||
if line != None :
|
||||
if line is not None :
|
||||
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName)
|
||||
# Strip the space char
|
||||
PCD.PcdCName = PCD.PcdCName.strip(' ')
|
||||
|
|
|
@ -116,7 +116,7 @@ class Dec(DecObject):
|
|||
#
|
||||
# Load Dec file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadDecFile(Filename)
|
||||
|
||||
#
|
||||
|
|
|
@ -54,7 +54,7 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
|
|||
# @param Dict: The dictionary to be printed
|
||||
#
|
||||
def printDict(Dict):
|
||||
if Dict != None:
|
||||
if Dict is not None:
|
||||
KeyList = Dict.keys()
|
||||
for Key in KeyList:
|
||||
if Dict[Key] != '':
|
||||
|
|
|
@ -128,7 +128,7 @@ class Dsc(DscObject):
|
|||
#
|
||||
# Load Dsc file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadDscFile(Filename)
|
||||
|
||||
#
|
||||
|
@ -902,7 +902,7 @@ class Dsc(DscObject):
|
|||
#
|
||||
def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName='', VariableGuid='', VariableOffset='', HiiDefaultValue='', VpdOffset='', DefaultValue=''):
|
||||
SkuNameList = GetSplitValueList(SkuNameList)
|
||||
if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:
|
||||
if SkuNameList is None or SkuNameList == [] or SkuNameList == ['']:
|
||||
SkuNameList = ['DEFAULT']
|
||||
SkuInfoList = {}
|
||||
for Item in SkuNameList:
|
||||
|
|
|
@ -38,7 +38,7 @@ class EdkIIWorkspace:
|
|||
#
|
||||
# Check environment valiable 'WORKSPACE'
|
||||
#
|
||||
if os.environ.get('WORKSPACE') == None:
|
||||
if os.environ.get('WORKSPACE') is None:
|
||||
print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
|
||||
return False
|
||||
|
||||
|
|
|
@ -93,7 +93,7 @@ class PcdClassObject(object):
|
|||
# @retval True The two pcds are the same
|
||||
#
|
||||
def __eq__(self, Other):
|
||||
return Other != None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
|
||||
return Other is not None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
|
||||
|
||||
## Override __hash__ function
|
||||
#
|
||||
|
@ -121,7 +121,7 @@ class LibraryClassObject(object):
|
|||
def __init__(self, Name = None, SupModList = [], Type = None):
|
||||
self.LibraryClass = Name
|
||||
self.SupModList = SupModList
|
||||
if Type != None:
|
||||
if Type is not None:
|
||||
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
|
||||
|
||||
## ModuleBuildClassObject
|
||||
|
@ -864,7 +864,7 @@ class WorkspaceBuild(object):
|
|||
for Libs in Pb.LibraryClass:
|
||||
for Type in Libs.SupModList:
|
||||
Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)
|
||||
if Instance == None:
|
||||
if Instance is None:
|
||||
Instance = RecommendedInstance
|
||||
Pb.LibraryClasses[(Lib, Type)] = Instance
|
||||
else:
|
||||
|
@ -872,7 +872,7 @@ class WorkspaceBuild(object):
|
|||
# For Module
|
||||
#
|
||||
Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)
|
||||
if Instance == None:
|
||||
if Instance is None:
|
||||
Instance = RecommendedInstance
|
||||
Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance
|
||||
|
||||
|
@ -912,7 +912,7 @@ class WorkspaceBuild(object):
|
|||
if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):
|
||||
continue
|
||||
Module = self.Build[Arch].ModuleDatabase[Inf]
|
||||
if Module.LibraryClass == None or Module.LibraryClass == []:
|
||||
if Module.LibraryClass is None or Module.LibraryClass == []:
|
||||
self.UpdateLibrariesOfModule(Platform, Module, Arch)
|
||||
for Key in Module.LibraryClasses:
|
||||
Lib = Module.LibraryClasses[Key]
|
||||
|
@ -969,15 +969,15 @@ class WorkspaceBuild(object):
|
|||
continue
|
||||
|
||||
LibraryClassName = Key[0]
|
||||
if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:
|
||||
if LibraryPath == None or LibraryPath == "":
|
||||
if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] is None:
|
||||
if LibraryPath is None or LibraryPath == "":
|
||||
LibraryInstance[LibraryClassName] = None
|
||||
continue
|
||||
LibraryModule = ModuleDatabase[LibraryPath]
|
||||
LibraryInstance[LibraryClassName] = LibraryModule
|
||||
LibraryConsumerList.append(LibraryModule)
|
||||
EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))
|
||||
elif LibraryPath == None or LibraryPath == "":
|
||||
elif LibraryPath is None or LibraryPath == "":
|
||||
continue
|
||||
else:
|
||||
LibraryModule = LibraryInstance[LibraryClassName]
|
||||
|
@ -1002,7 +1002,7 @@ class WorkspaceBuild(object):
|
|||
Q = []
|
||||
for LibraryClassName in LibraryInstance:
|
||||
M = LibraryInstance[LibraryClassName]
|
||||
if M == None:
|
||||
if M is None:
|
||||
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
|
||||
"Library instance for library class [%s] is not found" % LibraryClassName,
|
||||
ExtraData="\t%s [%s]" % (str(Module), Arch))
|
||||
|
@ -1011,7 +1011,7 @@ class WorkspaceBuild(object):
|
|||
# check if there're duplicate library classes
|
||||
#
|
||||
for Lc in M.LibraryClass:
|
||||
if Lc.SupModList != None and ModuleType not in Lc.SupModList:
|
||||
if Lc.SupModList is not None and ModuleType not in Lc.SupModList:
|
||||
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
|
||||
"Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),
|
||||
ExtraData="\t%s" % str(Module))
|
||||
|
@ -1380,7 +1380,7 @@ class WorkspaceBuild(object):
|
|||
if (Name, Guid) in Pcds:
|
||||
OwnerPlatform = Dsc
|
||||
Pcd = Pcds[(Name, Guid)]
|
||||
if Pcd.Type != '' and Pcd.Type != None:
|
||||
if Pcd.Type != '' and Pcd.Type is not None:
|
||||
NewType = Pcd.Type
|
||||
if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:
|
||||
NewType = DataType.TAB_PCDS_DYNAMIC
|
||||
|
@ -1396,13 +1396,13 @@ class WorkspaceBuild(object):
|
|||
EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)
|
||||
|
||||
|
||||
if Pcd.DatumType != '' and Pcd.DatumType != None:
|
||||
if Pcd.DatumType != '' and Pcd.DatumType is not None:
|
||||
DatumType = Pcd.DatumType
|
||||
if Pcd.TokenValue != '' and Pcd.TokenValue != None:
|
||||
if Pcd.TokenValue != '' and Pcd.TokenValue is not None:
|
||||
Token = Pcd.TokenValue
|
||||
if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:
|
||||
if Pcd.DefaultValue != '' and Pcd.DefaultValue is not None:
|
||||
Value = Pcd.DefaultValue
|
||||
if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:
|
||||
if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize is not None:
|
||||
MaxDatumSize = Pcd.MaxDatumSize
|
||||
SkuInfoList = Pcd.SkuInfoList
|
||||
|
||||
|
|
|
@ -89,7 +89,7 @@ def debug(Level, Message, ExtraData=None):
|
|||
"msg" : Message,
|
||||
}
|
||||
|
||||
if ExtraData != None:
|
||||
if ExtraData is not None:
|
||||
LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData
|
||||
else:
|
||||
LogText = _DebugMessageTemplate % TemplateDict
|
||||
|
@ -119,10 +119,10 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
|
|||
return
|
||||
|
||||
# if no tool name given, use caller's source file name as tool name
|
||||
if ToolName == None or ToolName == "":
|
||||
if ToolName is None or ToolName == "":
|
||||
ToolName = os.path.basename(traceback.extract_stack()[-2][0])
|
||||
|
||||
if Line == None:
|
||||
if Line is None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
@ -134,12 +134,12 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
|
|||
"msg" : Message,
|
||||
}
|
||||
|
||||
if File != None:
|
||||
if File is not None:
|
||||
LogText = _WarningMessageTemplate % TemplateDict
|
||||
else:
|
||||
LogText = _WarningMessageTemplateWithoutFile % TemplateDict
|
||||
|
||||
if ExtraData != None:
|
||||
if ExtraData is not None:
|
||||
LogText += "\n %s" % ExtraData
|
||||
|
||||
_InfoLogger.log(WARN, LogText)
|
||||
|
@ -168,18 +168,18 @@ info = _InfoLogger.info
|
|||
# it's True. This is the default behavior.
|
||||
#
|
||||
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
|
||||
if Line == None:
|
||||
if Line is None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
||||
if Message == None:
|
||||
if Message is None:
|
||||
if ErrorCode in gErrorMessage:
|
||||
Message = gErrorMessage[ErrorCode]
|
||||
else:
|
||||
Message = gErrorMessage[UNKNOWN_ERROR]
|
||||
|
||||
if ExtraData == None:
|
||||
if ExtraData is None:
|
||||
ExtraData = ""
|
||||
|
||||
TemplateDict = {
|
||||
|
@ -191,7 +191,7 @@ def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=Non
|
|||
"extra" : ExtraData
|
||||
}
|
||||
|
||||
if File != None:
|
||||
if File is not None:
|
||||
LogText = _ErrorMessageTemplate % TemplateDict
|
||||
else:
|
||||
LogText = _ErrorMessageTemplateWithoutFile % TemplateDict
|
||||
|
|
|
@ -51,7 +51,7 @@ class Fdf(FdfObject):
|
|||
#
|
||||
# Load Fdf file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadFdfFile(Filename)
|
||||
|
||||
#
|
||||
|
|
|
@ -356,7 +356,7 @@ class FdfParser(object):
|
|||
if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
|
||||
Value = Profile.MacroValue
|
||||
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
Str = Str.replace('$(' + Name + ')', Value)
|
||||
MacroEnd = MacroStart + len(Value)
|
||||
|
||||
|
@ -679,8 +679,8 @@ class FdfParser(object):
|
|||
FileLineTuple = GetRealFileLine(self.FileName, Line)
|
||||
if Name in InputMacroDict:
|
||||
MacroValue = InputMacroDict[Name]
|
||||
if Op == None:
|
||||
if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
|
||||
if Op is None:
|
||||
if Value == 'Bool' and MacroValue is None or MacroValue.upper() == 'FALSE':
|
||||
return False
|
||||
return True
|
||||
elif Op == '!=':
|
||||
|
@ -694,7 +694,7 @@ class FdfParser(object):
|
|||
else:
|
||||
return False
|
||||
else:
|
||||
if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
|
||||
if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue is not None and MacroValue.isdigit())):
|
||||
InputVal = long(Value, 0)
|
||||
MacroVal = long(MacroValue, 0)
|
||||
if Op == '>':
|
||||
|
@ -724,8 +724,8 @@ class FdfParser(object):
|
|||
|
||||
for Profile in AllMacroList:
|
||||
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
|
||||
if Op == None:
|
||||
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
|
||||
if Op is None:
|
||||
if Value == 'Bool' and Profile.MacroValue is None or Profile.MacroValue.upper() == 'FALSE':
|
||||
return False
|
||||
return True
|
||||
elif Op == '!=':
|
||||
|
@ -739,7 +739,7 @@ class FdfParser(object):
|
|||
else:
|
||||
return False
|
||||
else:
|
||||
if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
|
||||
if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue is not None and Profile.MacroValue.isdigit())):
|
||||
InputVal = long(Value, 0)
|
||||
MacroVal = long(Profile.MacroValue, 0)
|
||||
if Op == '>':
|
||||
|
@ -935,7 +935,7 @@ class FdfParser(object):
|
|||
|
||||
if not self.__GetNextToken():
|
||||
return False
|
||||
if gGuidPattern.match(self.__Token) != None:
|
||||
if gGuidPattern.match(self.__Token) is not None:
|
||||
return True
|
||||
else:
|
||||
self.__UndoToken()
|
||||
|
@ -1454,7 +1454,7 @@ class FdfParser(object):
|
|||
pass
|
||||
|
||||
for Item in Obj.BlockSizeList:
|
||||
if Item[0] == None or Item[1] == None:
|
||||
if Item[0] is None or Item[1] is None:
|
||||
raise Warning("expected block statement for Fd Section", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
return True
|
||||
|
@ -2423,7 +2423,7 @@ class FdfParser(object):
|
|||
|
||||
FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()
|
||||
FvImageSectionObj.Alignment = AlignValue
|
||||
if FvObj != None:
|
||||
if FvObj is not None:
|
||||
FvImageSectionObj.Fv = FvObj
|
||||
FvImageSectionObj.FvName = None
|
||||
else:
|
||||
|
@ -2942,7 +2942,7 @@ class FdfParser(object):
|
|||
Rule.CheckSum = CheckSum
|
||||
Rule.Fixed = Fixed
|
||||
Rule.KeyStringList = KeyStringList
|
||||
if KeepReloc != None:
|
||||
if KeepReloc is not None:
|
||||
Rule.KeepReloc = KeepReloc
|
||||
|
||||
while True:
|
||||
|
@ -2969,7 +2969,7 @@ class FdfParser(object):
|
|||
Rule.Fixed = Fixed
|
||||
Rule.FileExtension = Ext
|
||||
Rule.KeyStringList = KeyStringList
|
||||
if KeepReloc != None:
|
||||
if KeepReloc is not None:
|
||||
Rule.KeepReloc = KeepReloc
|
||||
|
||||
return Rule
|
||||
|
@ -3012,7 +3012,7 @@ class FdfParser(object):
|
|||
Rule.Fixed = Fixed
|
||||
Rule.FileName = self.__Token
|
||||
Rule.KeyStringList = KeyStringList
|
||||
if KeepReloc != None:
|
||||
if KeepReloc is not None:
|
||||
Rule.KeepReloc = KeepReloc
|
||||
return Rule
|
||||
|
||||
|
@ -3149,7 +3149,7 @@ class FdfParser(object):
|
|||
EfiSectionObj.KeepReloc = False
|
||||
else:
|
||||
EfiSectionObj.KeepReloc = True
|
||||
if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
|
||||
if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
|
||||
raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
|
||||
else:
|
||||
raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
|
||||
|
@ -3471,7 +3471,7 @@ class FdfParser(object):
|
|||
raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
|
||||
if Pattern.match(self.__Token) == None:
|
||||
if Pattern.match(self.__Token) is None:
|
||||
raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)
|
||||
CompStatementObj.CompVer = self.__Token
|
||||
|
||||
|
@ -3544,7 +3544,7 @@ class FdfParser(object):
|
|||
for elementRegion in FdObj.RegionList:
|
||||
if elementRegion.RegionType == 'FV':
|
||||
for elementRegionData in elementRegion.RegionDataList:
|
||||
if elementRegionData != None and elementRegionData.upper() not in FvList:
|
||||
if elementRegionData is not None and elementRegionData.upper() not in FvList:
|
||||
FvList.append(elementRegionData.upper())
|
||||
return FvList
|
||||
|
||||
|
@ -3561,9 +3561,9 @@ class FdfParser(object):
|
|||
|
||||
for FfsObj in FvObj.FfsList:
|
||||
if isinstance(FfsObj, FfsFileStatement.FileStatement):
|
||||
if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
|
||||
if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
|
||||
RefFvList.append(FfsObj.FvName.upper())
|
||||
elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
|
||||
elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
|
||||
RefFdList.append(FfsObj.FdName.upper())
|
||||
else:
|
||||
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
|
||||
|
@ -3584,9 +3584,9 @@ class FdfParser(object):
|
|||
while SectionStack != []:
|
||||
SectionObj = SectionStack.pop()
|
||||
if isinstance(SectionObj, FvImageSection.FvImageSection):
|
||||
if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
|
||||
if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
|
||||
FvList.append(SectionObj.FvName.upper())
|
||||
if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
|
||||
if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
|
||||
FvList.append(SectionObj.Fv.UiFvName.upper())
|
||||
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
|
||||
|
||||
|
|
|
@ -199,7 +199,7 @@ class Inf(InfObject):
|
|||
#
|
||||
# Load Inf file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadInfFile(Filename)
|
||||
|
||||
#
|
||||
|
|
|
@ -85,7 +85,7 @@ def _parseForXcode(lines, efifilepath, varnames):
|
|||
for varname in varnames:
|
||||
if varname in line:
|
||||
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
ret.append((varname, m.group(1)))
|
||||
return ret
|
||||
|
||||
|
@ -110,27 +110,27 @@ def _parseForGCC(lines, efifilepath, varnames):
|
|||
# status handler
|
||||
if status == 3:
|
||||
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
sections.append(m.groups(0))
|
||||
for varname in varnames:
|
||||
Str = ''
|
||||
m = re.match("^.data.(%s)" % varname, line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
m = re.match(".data.(%s)$" % varname, line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
Str = lines[index + 1]
|
||||
else:
|
||||
Str = line[len(".data.%s" % varname):]
|
||||
if Str:
|
||||
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip())
|
||||
if m != None:
|
||||
if m is not None:
|
||||
varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
|
||||
|
||||
if not varoffset:
|
||||
return []
|
||||
# get section information from efi file
|
||||
efisecs = PeImageClass(efifilepath).SectionHeaderList
|
||||
if efisecs == None or len(efisecs) == 0:
|
||||
if efisecs is None or len(efisecs) == 0:
|
||||
return []
|
||||
#redirection
|
||||
redirection = 0
|
||||
|
@ -166,19 +166,19 @@ def _parseGeneral(lines, efifilepath, varnames):
|
|||
continue
|
||||
if status == 1 and len(line) != 0:
|
||||
m = secRe.match(line)
|
||||
assert m != None, "Fail to parse the section in map file , line is %s" % line
|
||||
assert m is not None, "Fail to parse the section in map file , line is %s" % line
|
||||
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
|
||||
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
|
||||
if status == 2 and len(line) != 0:
|
||||
for varname in varnames:
|
||||
m = symRe.match(line)
|
||||
assert m != None, "Fail to parse the symbol in map file, line is %s" % line
|
||||
assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
|
||||
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
|
||||
sec_no = int(sec_no, 16)
|
||||
sym_offset = int(sym_offset, 16)
|
||||
vir_addr = int(vir_addr, 16)
|
||||
m2 = re.match('^[_]*(%s)' % varname, sym_name)
|
||||
if m2 != None:
|
||||
if m2 is not None:
|
||||
# fond a binary pcd entry in map file
|
||||
for sec in secs:
|
||||
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
|
||||
|
@ -188,7 +188,7 @@ def _parseGeneral(lines, efifilepath, varnames):
|
|||
|
||||
# get section information from efi file
|
||||
efisecs = PeImageClass(efifilepath).SectionHeaderList
|
||||
if efisecs == None or len(efisecs) == 0:
|
||||
if efisecs is None or len(efisecs) == 0:
|
||||
return []
|
||||
|
||||
ret = []
|
||||
|
@ -423,7 +423,7 @@ def GuidStructureStringToGuidValueName(GuidValue):
|
|||
# @param Directory The directory name
|
||||
#
|
||||
def CreateDirectory(Directory):
|
||||
if Directory == None or Directory.strip() == "":
|
||||
if Directory is None or Directory.strip() == "":
|
||||
return True
|
||||
try:
|
||||
if not os.access(Directory, os.F_OK):
|
||||
|
@ -437,7 +437,7 @@ def CreateDirectory(Directory):
|
|||
# @param Directory The directory name
|
||||
#
|
||||
def RemoveDirectory(Directory, Recursively=False):
|
||||
if Directory == None or Directory.strip() == "" or not os.path.exists(Directory):
|
||||
if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):
|
||||
return
|
||||
if Recursively:
|
||||
CurrentDirectory = os.getcwd()
|
||||
|
@ -540,7 +540,7 @@ def DataDump(Data, File):
|
|||
except:
|
||||
EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False)
|
||||
finally:
|
||||
if Fd != None:
|
||||
if Fd is not None:
|
||||
Fd.close()
|
||||
|
||||
## Restore a Python object from a file
|
||||
|
@ -560,7 +560,7 @@ def DataRestore(File):
|
|||
EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e)))
|
||||
Data = None
|
||||
finally:
|
||||
if Fd != None:
|
||||
if Fd is not None:
|
||||
Fd.close()
|
||||
return Data
|
||||
|
||||
|
@ -668,7 +668,7 @@ def GetFiles(Root, SkipList=None, FullPath=True):
|
|||
# @retval False if file doesn't exists
|
||||
#
|
||||
def ValidFile(File, Ext=None):
|
||||
if Ext != None:
|
||||
if Ext is not None:
|
||||
Dummy, FileExt = os.path.splitext(File)
|
||||
if FileExt.lower() != Ext.lower():
|
||||
return False
|
||||
|
@ -715,13 +715,13 @@ def RealPath2(File, Dir='', OverrideDir=''):
|
|||
#
|
||||
def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
|
||||
NewFile = File
|
||||
if Ext != None:
|
||||
if Ext is not None:
|
||||
Dummy, FileExt = os.path.splitext(File)
|
||||
if FileExt.lower() != Ext.lower():
|
||||
return False, File
|
||||
|
||||
# Replace the Edk macros
|
||||
if OverrideDir != '' and OverrideDir != None:
|
||||
if OverrideDir != '' and OverrideDir is not None:
|
||||
if OverrideDir.find('$(EFI_SOURCE)') > -1:
|
||||
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
|
||||
if OverrideDir.find('$(EDK_SOURCE)') > -1:
|
||||
|
@ -737,19 +737,19 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
|
|||
NewFile = File.replace('$(EFI_SOURCE)', EfiSource)
|
||||
NewFile = NewFile.replace('$(EDK_SOURCE)', EdkSource)
|
||||
NewFile = AllFiles[os.path.normpath(NewFile)]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
return True, NewFile
|
||||
|
||||
# Second check the path with override value
|
||||
if OverrideDir != '' and OverrideDir != None:
|
||||
if OverrideDir != '' and OverrideDir is not None:
|
||||
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
return True, NewFile
|
||||
|
||||
# Last check the path with normal definitions
|
||||
File = os.path.join(Dir, File)
|
||||
NewFile = AllFiles[os.path.normpath(File)]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
return True, NewFile
|
||||
|
||||
return False, File
|
||||
|
@ -759,7 +759,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
|
|||
#
|
||||
def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
|
||||
# Replace the Edk macros
|
||||
if OverrideDir != '' and OverrideDir != None:
|
||||
if OverrideDir != '' and OverrideDir is not None:
|
||||
if OverrideDir.find('$(EFI_SOURCE)') > -1:
|
||||
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
|
||||
if OverrideDir.find('$(EDK_SOURCE)') > -1:
|
||||
|
@ -781,23 +781,23 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.'
|
|||
File = File.replace('$(EFI_SOURCE)', EfiSource)
|
||||
File = File.replace('$(EDK_SOURCE)', EdkSource)
|
||||
NewFile = AllFiles[os.path.normpath(File)]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
NewRelaPath = os.path.dirname(NewFile)
|
||||
File = os.path.basename(NewFile)
|
||||
#NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
|
||||
break
|
||||
|
||||
# Second check the path with override value
|
||||
if OverrideDir != '' and OverrideDir != None:
|
||||
if OverrideDir != '' and OverrideDir is not None:
|
||||
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
#NewRelaPath = os.path.dirname(NewFile)
|
||||
NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
|
||||
break
|
||||
|
||||
# Last check the path with normal definitions
|
||||
NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))]
|
||||
if NewFile != None:
|
||||
if NewFile is not None:
|
||||
break
|
||||
|
||||
# No file found
|
||||
|
@ -1062,7 +1062,7 @@ class Progressor:
|
|||
self.CodaMessage = CloseMessage
|
||||
self.ProgressChar = ProgressChar
|
||||
self.Interval = Interval
|
||||
if Progressor._StopFlag == None:
|
||||
if Progressor._StopFlag is None:
|
||||
Progressor._StopFlag = threading.Event()
|
||||
|
||||
## Start to print progress charater
|
||||
|
@ -1070,10 +1070,10 @@ class Progressor:
|
|||
# @param OpenMessage The string printed before progress charaters
|
||||
#
|
||||
def Start(self, OpenMessage=None):
|
||||
if OpenMessage != None:
|
||||
if OpenMessage is not None:
|
||||
self.PromptMessage = OpenMessage
|
||||
Progressor._StopFlag.clear()
|
||||
if Progressor._ProgressThread == None:
|
||||
if Progressor._ProgressThread is None:
|
||||
Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
|
||||
Progressor._ProgressThread.setDaemon(False)
|
||||
Progressor._ProgressThread.start()
|
||||
|
@ -1084,7 +1084,7 @@ class Progressor:
|
|||
#
|
||||
def Stop(self, CloseMessage=None):
|
||||
OriginalCodaMessage = self.CodaMessage
|
||||
if CloseMessage != None:
|
||||
if CloseMessage is not None:
|
||||
self.CodaMessage = CloseMessage
|
||||
self.Abort()
|
||||
self.CodaMessage = OriginalCodaMessage
|
||||
|
@ -1107,9 +1107,9 @@ class Progressor:
|
|||
## Abort the progress display
|
||||
@staticmethod
|
||||
def Abort():
|
||||
if Progressor._StopFlag != None:
|
||||
if Progressor._StopFlag is not None:
|
||||
Progressor._StopFlag.set()
|
||||
if Progressor._ProgressThread != None:
|
||||
if Progressor._ProgressThread is not None:
|
||||
Progressor._ProgressThread.join()
|
||||
Progressor._ProgressThread = None
|
||||
|
||||
|
@ -1228,7 +1228,7 @@ class sdict(IterableUserDict):
|
|||
return key, value
|
||||
|
||||
def update(self, dict=None, **kwargs):
|
||||
if dict != None:
|
||||
if dict is not None:
|
||||
for k, v in dict.items():
|
||||
self[k] = v
|
||||
if len(kwargs):
|
||||
|
@ -1301,7 +1301,7 @@ class tdict:
|
|||
if self._Level_ > 1:
|
||||
RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
|
||||
|
||||
if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
|
||||
if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:
|
||||
FirstKey = self._Wildcard
|
||||
|
||||
if self._Single_:
|
||||
|
@ -1316,24 +1316,24 @@ class tdict:
|
|||
if FirstKey == self._Wildcard:
|
||||
if FirstKey in self.data:
|
||||
Value = self.data[FirstKey][RestKeys]
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
for Key in self.data:
|
||||
Value = self.data[Key][RestKeys]
|
||||
if Value != None: break
|
||||
if Value is not None: break
|
||||
else:
|
||||
if FirstKey in self.data:
|
||||
Value = self.data[FirstKey][RestKeys]
|
||||
if Value == None and self._Wildcard in self.data:
|
||||
if Value is None and self._Wildcard in self.data:
|
||||
#print "Value=None"
|
||||
Value = self.data[self._Wildcard][RestKeys]
|
||||
else:
|
||||
if FirstKey == self._Wildcard:
|
||||
if FirstKey in self.data:
|
||||
Value = self.data[FirstKey]
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
for Key in self.data:
|
||||
Value = self.data[Key]
|
||||
if Value != None: break
|
||||
if Value is not None: break
|
||||
else:
|
||||
if FirstKey in self.data:
|
||||
Value = self.data[FirstKey]
|
||||
|
@ -2066,7 +2066,7 @@ class PathClass(object):
|
|||
return hash(self.Path)
|
||||
|
||||
def _GetFileKey(self):
|
||||
if self._Key == None:
|
||||
if self._Key is None:
|
||||
self._Key = self.Path.upper() # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target
|
||||
return self._Key
|
||||
|
||||
|
|
|
@ -299,7 +299,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
|
|||
#
|
||||
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
|
||||
Format = '<TokenSpaceGuidCName>.<PcdCName>'
|
||||
if TokenInfoString != '' and TokenInfoString != None:
|
||||
if TokenInfoString != '' and TokenInfoString is not None:
|
||||
TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
|
||||
if len(TokenInfoList) == 2:
|
||||
return True
|
||||
|
@ -550,7 +550,7 @@ def GetComponents(Lines, Key, KeyValues, CommentCharacter):
|
|||
LineList = Lines.split('\n')
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line == None or Line == '':
|
||||
if Line is None or Line == '':
|
||||
continue
|
||||
|
||||
if findBlock == False:
|
||||
|
|
|
@ -634,7 +634,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
|
|||
# @retval True The file type is correct
|
||||
#
|
||||
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
if CheckFilename != '' and CheckFilename is not None:
|
||||
(Root, Ext) = os.path.splitext(CheckFilename)
|
||||
if Ext.upper() != ExtName.upper():
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
|
@ -662,7 +662,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
|
|||
#
|
||||
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
|
||||
CheckFile = ''
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
if CheckFilename != '' and CheckFilename is not None:
|
||||
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
|
||||
if not os.path.isfile(CheckFile):
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
|
|
|
@ -45,7 +45,7 @@ class TargetTxtClassObject(object):
|
|||
DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
|
||||
}
|
||||
self.ConfDirectoryPath = ""
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadTargetTxtFile(Filename)
|
||||
|
||||
## LoadTargetTxtFile
|
||||
|
@ -83,7 +83,7 @@ class TargetTxtClassObject(object):
|
|||
self.ConfDirectoryPath = os.path.dirname(FileName)
|
||||
except:
|
||||
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
|
||||
if F != None:
|
||||
if F is not None:
|
||||
F.close()
|
||||
|
||||
for Line in F:
|
||||
|
@ -144,7 +144,7 @@ class TargetTxtClassObject(object):
|
|||
# @param Dict: The dictionary to be printed
|
||||
#
|
||||
def printDict(Dict):
|
||||
if Dict != None:
|
||||
if Dict is not None:
|
||||
KeyList = Dict.keys()
|
||||
for Key in KeyList:
|
||||
if Dict[Key] != '':
|
||||
|
|
|
@ -53,7 +53,7 @@ class ToolDefClassObject(object):
|
|||
for Env in os.environ:
|
||||
self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]
|
||||
|
||||
if FileName != None:
|
||||
if FileName is not None:
|
||||
self.LoadToolDefFile(FileName)
|
||||
|
||||
## LoadToolDefFile
|
||||
|
|
|
@ -89,7 +89,7 @@ class VpdInfoFile:
|
|||
# @param offset integer value for VPD's offset in specific SKU.
|
||||
#
|
||||
def Add(self, Vpd, skuname,Offset):
|
||||
if (Vpd == None):
|
||||
if (Vpd is None):
|
||||
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
|
||||
|
||||
if not (Offset >= 0 or Offset == "*"):
|
||||
|
@ -100,7 +100,7 @@ class VpdInfoFile:
|
|||
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
||||
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
|
||||
if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
|
||||
if Vpd.MaxDatumSize is None or Vpd.MaxDatumSize == "":
|
||||
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
|
||||
else:
|
||||
if Vpd.MaxDatumSize <= 0:
|
||||
|
@ -122,7 +122,7 @@ class VpdInfoFile:
|
|||
# If
|
||||
# @param FilePath The given file path which would hold VPD information
|
||||
def Write(self, FilePath):
|
||||
if not (FilePath != None or len(FilePath) != 0):
|
||||
if not (FilePath is not None or len(FilePath) != 0):
|
||||
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||
"Invalid parameter FilePath: %s." % FilePath)
|
||||
|
||||
|
@ -227,8 +227,8 @@ class VpdInfoFile:
|
|||
# @param VpdFileName The string path name for VPD information guid.txt
|
||||
#
|
||||
def CallExtenalBPDGTool(ToolPath, VpdFileName):
|
||||
assert ToolPath != None, "Invalid parameter ToolPath"
|
||||
assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
|
||||
assert ToolPath is not None, "Invalid parameter ToolPath"
|
||||
assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
|
||||
|
||||
OutputDir = os.path.dirname(VpdFileName)
|
||||
FileName = os.path.basename(VpdFileName)
|
||||
|
@ -250,7 +250,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
|
|||
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
|
||||
(out, error) = PopenObject.communicate()
|
||||
print out
|
||||
while PopenObject.returncode == None :
|
||||
while PopenObject.returncode is None :
|
||||
PopenObject.wait()
|
||||
|
||||
if PopenObject.returncode != 0:
|
||||
|
|
|
@ -44,11 +44,11 @@ def GenerateHelpText(Text, Lang):
|
|||
class CommonClass(object):
|
||||
def __init__(self, Usage = None, FeatureFlag = '', SupArchList = None, HelpText = ''):
|
||||
self.Usage = Usage
|
||||
if self.Usage == None:
|
||||
if self.Usage is None:
|
||||
self.Usage = []
|
||||
self.FeatureFlag = FeatureFlag
|
||||
self.SupArchList = SupArchList
|
||||
if self.SupArchList == None:
|
||||
if self.SupArchList is None:
|
||||
self.SupArchList = []
|
||||
self.HelpText = HelpText
|
||||
self.HelpTextList = []
|
||||
|
@ -375,13 +375,13 @@ class PcdClass(CommonClass):
|
|||
self.PcdCName = ''
|
||||
self.Value = ''
|
||||
self.Offset = ''
|
||||
if self.ValidUsage == None:
|
||||
if self.ValidUsage is None:
|
||||
self.ValidUsage = []
|
||||
self.SkuInfoList = SkuInfoList
|
||||
if self.SkuInfoList == None:
|
||||
if self.SkuInfoList is None:
|
||||
self.SkuInfoList = {}
|
||||
self.SupModuleList = SupModuleList
|
||||
if self.SupModuleList == None:
|
||||
if self.SupModuleList is None:
|
||||
self.SupModuleList = []
|
||||
CommonClass.__init__(self)
|
||||
self.PcdErrors = []
|
||||
|
|
|
@ -783,14 +783,14 @@ class CParser(Parser):
|
|||
|
||||
if self.backtracking == 0:
|
||||
|
||||
if d != None:
|
||||
if d is not None:
|
||||
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
|
||||
else:
|
||||
self.function_definition_stack[-1].ModifierText = ''
|
||||
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
|
||||
self.function_definition_stack[-1].DeclLine = declarator1.start.line
|
||||
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
|
||||
if a != None:
|
||||
if a is not None:
|
||||
self.function_definition_stack[-1].LBLine = a.start.line
|
||||
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
|
||||
else:
|
||||
|
@ -920,7 +920,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if b != None:
|
||||
if b is not None:
|
||||
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
|
||||
else:
|
||||
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
|
||||
|
@ -957,7 +957,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if t != None:
|
||||
if t is not None:
|
||||
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
|
||||
|
||||
|
||||
|
@ -1401,7 +1401,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if s.stop != None:
|
||||
if s.stop is not None:
|
||||
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
|
||||
|
||||
|
||||
|
@ -1416,7 +1416,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if e.stop != None:
|
||||
if e.stop is not None:
|
||||
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
|
||||
|
||||
|
||||
|
|
|
@ -1299,7 +1299,7 @@ class Check(object):
|
|||
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
|
||||
for Record in RecordSet:
|
||||
Name = Record[1].strip()
|
||||
if Name != '' and Name != None:
|
||||
if Name != '' and Name is not None:
|
||||
if Name[0] == '(':
|
||||
Name = Name[1:Name.find(')')]
|
||||
if Name.find('(') > -1:
|
||||
|
|
|
@ -301,7 +301,7 @@ class CodeFragmentCollector:
|
|||
InCharLiteral = not InCharLiteral
|
||||
# meet new line, then no longer in a comment for // and '#'
|
||||
if self.__CurrentChar() == T_CHAR_LF:
|
||||
if HashComment and PPDirectiveObj != None:
|
||||
if HashComment and PPDirectiveObj is not None:
|
||||
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
|
||||
PPDirectiveObj.Content += T_CHAR_LF
|
||||
PPExtend = True
|
||||
|
@ -423,7 +423,7 @@ class CodeFragmentCollector:
|
|||
InCharLiteral = not InCharLiteral
|
||||
# meet new line, then no longer in a comment for // and '#'
|
||||
if self.__CurrentChar() == T_CHAR_LF:
|
||||
if HashComment and PPDirectiveObj != None:
|
||||
if HashComment and PPDirectiveObj is not None:
|
||||
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
|
||||
PPDirectiveObj.Content += T_CHAR_LF
|
||||
PPExtend = True
|
||||
|
|
|
@ -178,7 +178,7 @@ class Ecc(object):
|
|||
self.BuildMetaDataFileDatabase(SpeciDirs)
|
||||
if self.ScanSourceCode:
|
||||
EdkLogger.quiet("Building database for Meta Data File Done!")
|
||||
if SpeciDirs == None:
|
||||
if SpeciDirs is None:
|
||||
c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
|
||||
else:
|
||||
for specificDir in SpeciDirs:
|
||||
|
@ -195,7 +195,7 @@ class Ecc(object):
|
|||
#
|
||||
def BuildMetaDataFileDatabase(self, SpecificDirs = None):
|
||||
ScanFolders = []
|
||||
if SpecificDirs == None:
|
||||
if SpecificDirs is None:
|
||||
ScanFolders.append(EccGlobalData.gTarget)
|
||||
else:
|
||||
for specificDir in SpecificDirs:
|
||||
|
@ -346,15 +346,15 @@ class Ecc(object):
|
|||
self.SetLogLevel(Options)
|
||||
|
||||
# Set other options
|
||||
if Options.ConfigFile != None:
|
||||
if Options.ConfigFile is not None:
|
||||
self.ConfigFile = Options.ConfigFile
|
||||
if Options.OutputFile != None:
|
||||
if Options.OutputFile is not None:
|
||||
self.OutputFile = Options.OutputFile
|
||||
if Options.ReportFile != None:
|
||||
if Options.ReportFile is not None:
|
||||
self.ReportFile = Options.ReportFile
|
||||
if Options.ExceptionFile != None:
|
||||
if Options.ExceptionFile is not None:
|
||||
self.ExceptionFile = Options.ExceptionFile
|
||||
if Options.Target != None:
|
||||
if Options.Target is not None:
|
||||
if not os.path.isdir(Options.Target):
|
||||
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
|
||||
else:
|
||||
|
@ -362,15 +362,15 @@ class Ecc(object):
|
|||
else:
|
||||
EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
|
||||
EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
|
||||
if Options.keepdatabase != None:
|
||||
if Options.keepdatabase is not None:
|
||||
self.IsInit = False
|
||||
if Options.metadata != None and Options.sourcecode != None:
|
||||
if Options.metadata is not None and Options.sourcecode is not None:
|
||||
EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
|
||||
if Options.metadata != None:
|
||||
if Options.metadata is not None:
|
||||
self.ScanSourceCode = False
|
||||
if Options.sourcecode != None:
|
||||
if Options.sourcecode is not None:
|
||||
self.ScanMetaData = False
|
||||
if Options.folders != None:
|
||||
if Options.folders is not None:
|
||||
self.OnlyScan = True
|
||||
|
||||
## SetLogLevel
|
||||
|
@ -380,11 +380,11 @@ class Ecc(object):
|
|||
# @param Option: The option list including log level setting
|
||||
#
|
||||
def SetLogLevel(self, Option):
|
||||
if Option.verbose != None:
|
||||
if Option.verbose is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||
elif Option.quiet != None:
|
||||
elif Option.quiet is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
elif Option.debug != None:
|
||||
elif Option.debug is not None:
|
||||
EdkLogger.SetLevel(Option.debug + 1)
|
||||
else:
|
||||
EdkLogger.SetLevel(EdkLogger.INFO)
|
||||
|
|
|
@ -116,7 +116,7 @@ class Table(object):
|
|||
SqlCommand = """select max(ID) from %s""" % self.Table
|
||||
Record = self.Cur.execute(SqlCommand).fetchall()
|
||||
Id = Record[0][0]
|
||||
if Id == None:
|
||||
if Id is None:
|
||||
Id = self.IdBase
|
||||
return Id
|
||||
|
||||
|
@ -191,7 +191,7 @@ class TableDataModel(Table):
|
|||
def InitTable(self):
|
||||
EdkLogger.verbose("\nInitialize table DataModel started ...")
|
||||
Count = self.GetCount()
|
||||
if Count != None and Count != 0:
|
||||
if Count is not None and Count != 0:
|
||||
return
|
||||
for Item in DataClass.MODEL_LIST:
|
||||
CrossIndex = Item[1]
|
||||
|
|
|
@ -228,7 +228,7 @@ class MetaFileParser(object):
|
|||
self.Start()
|
||||
|
||||
# No specific ARCH or Platform given, use raw data
|
||||
if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
|
||||
if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
|
||||
return self._RawTable.Query(*DataInfo)
|
||||
|
||||
# Do post-process if necessary
|
||||
|
@ -564,7 +564,7 @@ class InfParser(MetaFileParser):
|
|||
self._ValueList = ['','','']
|
||||
# parse current line, result will be put in self._ValueList
|
||||
self._SectionParser[self._SectionType](self)
|
||||
if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
|
||||
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
|
||||
self._ItemType = -1
|
||||
continue
|
||||
#
|
||||
|
@ -877,7 +877,7 @@ class DscParser(MetaFileParser):
|
|||
|
||||
self._ValueList = ['', '', '']
|
||||
self._SectionParser[SectionType](self)
|
||||
if self._ValueList == None:
|
||||
if self._ValueList is None:
|
||||
continue
|
||||
#
|
||||
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
|
||||
|
@ -1197,7 +1197,7 @@ class DscParser(MetaFileParser):
|
|||
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
|
||||
Line=self._LineIndex+1)
|
||||
|
||||
if self._ValueList == None:
|
||||
if self._ValueList is None:
|
||||
continue
|
||||
|
||||
NewOwner = self._IdMapping.get(Owner, -1)
|
||||
|
@ -1573,7 +1573,7 @@ class DecParser(MetaFileParser):
|
|||
# section content
|
||||
self._ValueList = ['','','']
|
||||
self._SectionParser[self._SectionType[0]](self)
|
||||
if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
|
||||
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
|
||||
self._ItemType = -1
|
||||
self._Comments = []
|
||||
continue
|
||||
|
@ -1932,7 +1932,7 @@ class Fdf(FdfObject):
|
|||
#
|
||||
# Load Fdf file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
try:
|
||||
self.LoadFdfFile(Filename)
|
||||
except Exception:
|
||||
|
|
|
@ -117,9 +117,9 @@ class ModuleTable(MetaFileTable):
|
|||
ConditionString = "Model=%s AND Enabled>=0" % Model
|
||||
ValueString = "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine"
|
||||
|
||||
if Arch != None and Arch != 'COMMON':
|
||||
if Arch is not None and Arch != 'COMMON':
|
||||
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
|
||||
if Platform != None and Platform != 'COMMON':
|
||||
if Platform is not None and Platform != 'COMMON':
|
||||
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
|
||||
|
||||
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
|
||||
|
@ -198,7 +198,7 @@ class PackageTable(MetaFileTable):
|
|||
ConditionString = "Model=%s AND Enabled>=0" % Model
|
||||
ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"
|
||||
|
||||
if Arch != None and Arch != 'COMMON':
|
||||
if Arch is not None and Arch != 'COMMON':
|
||||
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
|
||||
|
||||
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
|
||||
|
@ -283,17 +283,17 @@ class PlatformTable(MetaFileTable):
|
|||
ConditionString = "Model=%s AND Enabled>0" % Model
|
||||
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
|
||||
|
||||
if Scope1 != None and Scope1 != 'COMMON':
|
||||
if Scope1 is not None and Scope1 != 'COMMON':
|
||||
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
|
||||
if Scope2 != None and Scope2 != 'COMMON':
|
||||
if Scope2 is not None and Scope2 != 'COMMON':
|
||||
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
|
||||
|
||||
if BelongsToItem != None:
|
||||
if BelongsToItem is not None:
|
||||
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
|
||||
else:
|
||||
ConditionString += " AND BelongsToItem<0"
|
||||
|
||||
if FromItem != None:
|
||||
if FromItem is not None:
|
||||
ConditionString += " AND FromItem=%s" % FromItem
|
||||
|
||||
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
|
||||
|
|
|
@ -30,14 +30,14 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
|
|||
def CreateXmlElement(Name, String, NodeList, AttributeList):
|
||||
Doc = xml.dom.minidom.Document()
|
||||
Element = Doc.createElement(Name)
|
||||
if String != '' and String != None:
|
||||
if String != '' and String is not None:
|
||||
Element.appendChild(Doc.createTextNode(String))
|
||||
|
||||
for Item in NodeList:
|
||||
if type(Item) == type([]):
|
||||
Key = Item[0]
|
||||
Value = Item[1]
|
||||
if Key != '' and Key != None and Value != '' and Value != None:
|
||||
if Key != '' and Key is not None and Value != '' and Value is not None:
|
||||
Node = Doc.createElement(Key)
|
||||
Node.appendChild(Doc.createTextNode(Value))
|
||||
Element.appendChild(Node)
|
||||
|
@ -46,7 +46,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
|
|||
for Item in AttributeList:
|
||||
Key = Item[0]
|
||||
Value = Item[1]
|
||||
if Key != '' and Key != None and Value != '' and Value != None:
|
||||
if Key != '' and Key is not None and Value != '' and Value is not None:
|
||||
Element.setAttribute(Key, Value)
|
||||
|
||||
return Element
|
||||
|
@ -62,7 +62,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
|
|||
# @revel Nodes A list of XML nodes matching XPath style Sting.
|
||||
#
|
||||
def XmlList(Dom, String):
|
||||
if String == None or String == "" or Dom == None or Dom == "":
|
||||
if String is None or String == "" or Dom is None or Dom == "":
|
||||
return []
|
||||
if Dom.nodeType == Dom.DOCUMENT_NODE:
|
||||
Dom = Dom.documentElement
|
||||
|
@ -98,7 +98,7 @@ def XmlList(Dom, String):
|
|||
# @revel Node A single XML node matching XPath style Sting.
|
||||
#
|
||||
def XmlNode(Dom, String):
|
||||
if String == None or String == "" or Dom == None or Dom == "":
|
||||
if String is None or String == "" or Dom is None or Dom == "":
|
||||
return ""
|
||||
if Dom.nodeType == Dom.DOCUMENT_NODE:
|
||||
Dom = Dom.documentElement
|
||||
|
|
|
@ -550,7 +550,7 @@ def CollectSourceCodeDataIntoDB(RootDir):
|
|||
Db.UpdateIdentifierBelongsToFunction()
|
||||
|
||||
def GetTableID(FullFileName, ErrorMsgList=None):
|
||||
if ErrorMsgList == None:
|
||||
if ErrorMsgList is None:
|
||||
ErrorMsgList = []
|
||||
|
||||
Db = GetDB()
|
||||
|
@ -575,7 +575,7 @@ def GetIncludeFileList(FullFileName):
|
|||
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
|
||||
return []
|
||||
IFList = IncludeFileListDict.get(FullFileName)
|
||||
if IFList != None:
|
||||
if IFList is not None:
|
||||
return IFList
|
||||
|
||||
FileID = GetTableID(FullFileName)
|
||||
|
@ -601,12 +601,12 @@ def GetFullPathOfIncludeFile(Str, IncludePathList):
|
|||
return None
|
||||
|
||||
def GetAllIncludeFiles(FullFileName):
|
||||
if AllIncludeFileListDict.get(FullFileName) != None:
|
||||
if AllIncludeFileListDict.get(FullFileName) is not None:
|
||||
return AllIncludeFileListDict.get(FullFileName)
|
||||
|
||||
FileDirName = os.path.dirname(FullFileName)
|
||||
IncludePathList = IncludePathListDict.get(FileDirName)
|
||||
if IncludePathList == None:
|
||||
if IncludePathList is None:
|
||||
IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
|
||||
if FileDirName not in IncludePathList:
|
||||
IncludePathList.insert(0, FileDirName)
|
||||
|
@ -618,7 +618,7 @@ def GetAllIncludeFiles(FullFileName):
|
|||
FileName = FileName.strip('\"')
|
||||
FileName = FileName.lstrip('<').rstrip('>').strip()
|
||||
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
|
||||
if FullPath != None:
|
||||
if FullPath is not None:
|
||||
IncludeFileQueue.append(FullPath)
|
||||
|
||||
i = 0
|
||||
|
@ -629,7 +629,7 @@ def GetAllIncludeFiles(FullFileName):
|
|||
FileName = FileName.strip('\"')
|
||||
FileName = FileName.lstrip('<').rstrip('>').strip()
|
||||
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
|
||||
if FullPath != None and FullPath not in IncludeFileQueue:
|
||||
if FullPath is not None and FullPath not in IncludeFileQueue:
|
||||
IncludeFileQueue.insert(i + 1, FullPath)
|
||||
i += 1
|
||||
|
||||
|
@ -853,7 +853,7 @@ def DiffModifier(Str1, Str2):
|
|||
def GetTypedefDict(FullFileName):
|
||||
|
||||
Dict = ComplexTypeDict.get(FullFileName)
|
||||
if Dict != None:
|
||||
if Dict is not None:
|
||||
return Dict
|
||||
|
||||
FileID = GetTableID(FullFileName)
|
||||
|
@ -898,7 +898,7 @@ def GetTypedefDict(FullFileName):
|
|||
def GetSUDict(FullFileName):
|
||||
|
||||
Dict = SUDict.get(FullFileName)
|
||||
if Dict != None:
|
||||
if Dict is not None:
|
||||
return Dict
|
||||
|
||||
FileID = GetTableID(FullFileName)
|
||||
|
@ -983,9 +983,9 @@ def StripComments(Str):
|
|||
|
||||
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
|
||||
Value = TypedefDict.get(Type)
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
Value = SUDict.get(Type)
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
return None
|
||||
|
||||
LBPos = Value.find('{')
|
||||
|
@ -994,11 +994,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
|
|||
for FT in FTList:
|
||||
if FT not in ('struct', 'union'):
|
||||
Value = TypedefDict.get(FT)
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
Value = SUDict.get(FT)
|
||||
break
|
||||
|
||||
if Value == None:
|
||||
if Value is None:
|
||||
return None
|
||||
|
||||
LBPos = Value.find('{')
|
||||
|
@ -1025,11 +1025,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
|
|||
return None
|
||||
|
||||
def GetRealType(Type, TypedefDict, TargetType=None):
|
||||
if TargetType != None and Type == TargetType:
|
||||
if TargetType is not None and Type == TargetType:
|
||||
return Type
|
||||
while TypedefDict.get(Type):
|
||||
Type = TypedefDict.get(Type)
|
||||
if TargetType != None and Type == TargetType:
|
||||
if TargetType is not None and Type == TargetType:
|
||||
return Type
|
||||
return Type
|
||||
|
||||
|
@ -1043,10 +1043,10 @@ def GetTypeInfo(RefList, Modifier, FullFileName, TargetType=None):
|
|||
while Index < len(RefList):
|
||||
FieldName = RefList[Index]
|
||||
FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
|
||||
if FromType == None:
|
||||
if FromType is None:
|
||||
return None
|
||||
# we want to determine the exact type.
|
||||
if TargetType != None:
|
||||
if TargetType is not None:
|
||||
Type = FromType.split()[0]
|
||||
# we only want to check if it is a pointer
|
||||
else:
|
||||
|
@ -1151,7 +1151,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
|
|||
# Type = GetDataTypeFromModifier(Result[0]).split()[-1]
|
||||
TypeList = GetDataTypeFromModifier(Result[0]).split()
|
||||
Type = TypeList[-1]
|
||||
if len(TypeList) > 1 and StarList != None:
|
||||
if len(TypeList) > 1 and StarList is not None:
|
||||
for Star in StarList:
|
||||
Type = Type.strip()
|
||||
Type = Type.rstrip(Star)
|
||||
|
@ -1174,7 +1174,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
|
|||
Type = TypeList[-1]
|
||||
if Type == '*' and len(TypeList) >= 2:
|
||||
Type = TypeList[-2]
|
||||
if len(TypeList) > 1 and StarList != None:
|
||||
if len(TypeList) > 1 and StarList is not None:
|
||||
for Star in StarList:
|
||||
Type = Type.strip()
|
||||
Type = Type.rstrip(Star)
|
||||
|
@ -1199,7 +1199,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
|
|||
else:
|
||||
TypeList = GetDataTypeFromModifier(Result[0]).split()
|
||||
Type = TypeList[-1]
|
||||
if len(TypeList) > 1 and StarList != None:
|
||||
if len(TypeList) > 1 and StarList is not None:
|
||||
for Star in StarList:
|
||||
Type = Type.strip()
|
||||
Type = Type.rstrip(Star)
|
||||
|
@ -1230,7 +1230,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
|
|||
else:
|
||||
TypeList = GetDataTypeFromModifier(Result[0]).split()
|
||||
Type = TypeList[-1]
|
||||
if len(TypeList) > 1 and StarList != None:
|
||||
if len(TypeList) > 1 and StarList is not None:
|
||||
for Star in StarList:
|
||||
Type = Type.strip()
|
||||
Type = Type.rstrip(Star)
|
||||
|
@ -1939,12 +1939,12 @@ def CheckPointerNullComparison(FullFileName):
|
|||
p = GetFuncDeclPattern()
|
||||
for Str in PSL:
|
||||
FuncRecord = GetFuncContainsPE(Str[1], FL)
|
||||
if FuncRecord == None:
|
||||
if FuncRecord is None:
|
||||
continue
|
||||
|
||||
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
|
||||
PredInfo = SplitPredicateStr(Exp)
|
||||
if PredInfo[1] == None:
|
||||
if PredInfo[1] is None:
|
||||
PredVarStr = PredInfo[0][0].strip()
|
||||
IsFuncCall = False
|
||||
SearchInCache = False
|
||||
|
@ -1966,7 +1966,7 @@ def CheckPointerNullComparison(FullFileName):
|
|||
continue
|
||||
if SearchInCache:
|
||||
Type = FuncReturnTypeDict.get(PredVarStr)
|
||||
if Type != None:
|
||||
if Type is not None:
|
||||
if Type.find('*') != -1 and Type != 'BOOLEAN*':
|
||||
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
|
||||
continue
|
||||
|
@ -1977,7 +1977,7 @@ def CheckPointerNullComparison(FullFileName):
|
|||
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
|
||||
if SearchInCache:
|
||||
FuncReturnTypeDict[PredVarStr] = Type
|
||||
if Type == None:
|
||||
if Type is None:
|
||||
continue
|
||||
Type = GetTypeFromArray(Type, PredVarStr)
|
||||
if Type.find('*') != -1 and Type != 'BOOLEAN*':
|
||||
|
@ -2018,12 +2018,12 @@ def CheckNonBooleanValueComparison(FullFileName):
|
|||
p = GetFuncDeclPattern()
|
||||
for Str in PSL:
|
||||
FuncRecord = GetFuncContainsPE(Str[1], FL)
|
||||
if FuncRecord == None:
|
||||
if FuncRecord is None:
|
||||
continue
|
||||
|
||||
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
|
||||
PredInfo = SplitPredicateStr(Exp)
|
||||
if PredInfo[1] == None:
|
||||
if PredInfo[1] is None:
|
||||
PredVarStr = PredInfo[0][0].strip()
|
||||
IsFuncCall = False
|
||||
SearchInCache = False
|
||||
|
@ -2046,7 +2046,7 @@ def CheckNonBooleanValueComparison(FullFileName):
|
|||
|
||||
if SearchInCache:
|
||||
Type = FuncReturnTypeDict.get(PredVarStr)
|
||||
if Type != None:
|
||||
if Type is not None:
|
||||
if Type.find('BOOLEAN') == -1:
|
||||
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
|
||||
continue
|
||||
|
@ -2056,7 +2056,7 @@ def CheckNonBooleanValueComparison(FullFileName):
|
|||
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
|
||||
if SearchInCache:
|
||||
FuncReturnTypeDict[PredVarStr] = Type
|
||||
if Type == None:
|
||||
if Type is None:
|
||||
continue
|
||||
if Type.find('BOOLEAN') == -1:
|
||||
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
|
||||
|
@ -2097,7 +2097,7 @@ def CheckBooleanValueComparison(FullFileName):
|
|||
p = GetFuncDeclPattern()
|
||||
for Str in PSL:
|
||||
FuncRecord = GetFuncContainsPE(Str[1], FL)
|
||||
if FuncRecord == None:
|
||||
if FuncRecord is None:
|
||||
continue
|
||||
|
||||
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
|
||||
|
@ -2125,7 +2125,7 @@ def CheckBooleanValueComparison(FullFileName):
|
|||
|
||||
if SearchInCache:
|
||||
Type = FuncReturnTypeDict.get(PredVarStr)
|
||||
if Type != None:
|
||||
if Type is not None:
|
||||
if Type.find('BOOLEAN') != -1:
|
||||
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
|
||||
continue
|
||||
|
@ -2136,7 +2136,7 @@ def CheckBooleanValueComparison(FullFileName):
|
|||
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
|
||||
if SearchInCache:
|
||||
FuncReturnTypeDict[PredVarStr] = Type
|
||||
if Type == None:
|
||||
if Type is None:
|
||||
continue
|
||||
if Type.find('BOOLEAN') != -1:
|
||||
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
|
||||
|
|
|
@ -783,14 +783,14 @@ class CParser(Parser):
|
|||
|
||||
if self.backtracking == 0:
|
||||
|
||||
if d != None:
|
||||
if d is not None:
|
||||
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
|
||||
else:
|
||||
self.function_definition_stack[-1].ModifierText = ''
|
||||
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
|
||||
self.function_definition_stack[-1].DeclLine = declarator1.start.line
|
||||
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
|
||||
if a != None:
|
||||
if a is not None:
|
||||
self.function_definition_stack[-1].LBLine = a.start.line
|
||||
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
|
||||
else:
|
||||
|
@ -920,7 +920,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if b != None:
|
||||
if b is not None:
|
||||
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
|
||||
else:
|
||||
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
|
||||
|
@ -957,7 +957,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if t != None:
|
||||
if t is not None:
|
||||
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
|
||||
|
||||
|
||||
|
@ -1401,7 +1401,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if s.stop != None:
|
||||
if s.stop is not None:
|
||||
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
|
||||
|
||||
|
||||
|
@ -1416,7 +1416,7 @@ class CParser(Parser):
|
|||
return
|
||||
if self.backtracking == 0:
|
||||
|
||||
if e.stop != None:
|
||||
if e.stop is not None:
|
||||
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
|
||||
|
||||
|
||||
|
|
|
@ -291,7 +291,7 @@ class CodeFragmentCollector:
|
|||
InCharLiteral = not InCharLiteral
|
||||
# meet new line, then no longer in a comment for // and '#'
|
||||
if self.__CurrentChar() == T_CHAR_LF:
|
||||
if HashComment and PPDirectiveObj != None:
|
||||
if HashComment and PPDirectiveObj is not None:
|
||||
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
|
||||
PPDirectiveObj.Content += T_CHAR_LF
|
||||
PPExtend = True
|
||||
|
|
|
@ -579,11 +579,11 @@ class Eot(object):
|
|||
# @param Option: The option list including log level setting
|
||||
#
|
||||
def SetLogLevel(self, Option):
|
||||
if Option.verbose != None:
|
||||
if Option.verbose is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||
elif Option.quiet != None:
|
||||
elif Option.quiet is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
elif Option.debug != None:
|
||||
elif Option.debug is not None:
|
||||
EdkLogger.SetLevel(Option.debug + 1)
|
||||
else:
|
||||
EdkLogger.SetLevel(EdkLogger.INFO)
|
||||
|
|
|
@ -52,7 +52,7 @@ class Image(array):
|
|||
return array.__new__(cls, 'B')
|
||||
|
||||
def __init__(m, ID=None):
|
||||
if ID == None:
|
||||
if ID is None:
|
||||
m._ID_ = str(uuid.uuid1()).upper()
|
||||
else:
|
||||
m._ID_ = ID
|
||||
|
@ -208,7 +208,7 @@ class FirmwareVolume(Image):
|
|||
return (CouldBeLoaded, DepexString, FileDepex)
|
||||
|
||||
def Dispatch(self, Db = None):
|
||||
if Db == None:
|
||||
if Db is None:
|
||||
return False
|
||||
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
|
||||
# Find PeiCore, DexCore, PeiPriori, DxePriori first
|
||||
|
@ -236,15 +236,15 @@ class FirmwareVolume(Image):
|
|||
continue
|
||||
|
||||
# Parse SEC_CORE first
|
||||
if FfsSecCoreGuid != None:
|
||||
if FfsSecCoreGuid is not None:
|
||||
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
|
||||
self.LoadPpi(Db, FfsSecCoreGuid)
|
||||
|
||||
# Parse PEI first
|
||||
if FfsPeiCoreGuid != None:
|
||||
if FfsPeiCoreGuid is not None:
|
||||
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
|
||||
self.LoadPpi(Db, FfsPeiCoreGuid)
|
||||
if FfsPeiPrioriGuid != None:
|
||||
if FfsPeiPrioriGuid is not None:
|
||||
# Load PEIM described in priori file
|
||||
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
|
||||
if len(FfsPeiPriori.Sections) == 1:
|
||||
|
@ -263,10 +263,10 @@ class FirmwareVolume(Image):
|
|||
self.DisPatchPei(Db)
|
||||
|
||||
# Parse DXE then
|
||||
if FfsDxeCoreGuid != None:
|
||||
if FfsDxeCoreGuid is not None:
|
||||
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
|
||||
self.LoadProtocol(Db, FfsDxeCoreGuid)
|
||||
if FfsDxePrioriGuid != None:
|
||||
if FfsDxePrioriGuid is not None:
|
||||
# Load PEIM described in priori file
|
||||
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
|
||||
if len(FfsDxePriori.Sections) == 1:
|
||||
|
@ -383,7 +383,7 @@ class FirmwareVolume(Image):
|
|||
IsInstalled = True
|
||||
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
|
||||
NewFfs.Depex = DepexString
|
||||
if FileDepex != None:
|
||||
if FileDepex is not None:
|
||||
ScheduleList.insert.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
|
||||
else:
|
||||
ScheduleList[FfsID] = NewFfs
|
||||
|
@ -471,7 +471,7 @@ class FirmwareVolume(Image):
|
|||
FfsId = repr(FfsObj)
|
||||
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
|
||||
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
|
||||
if LastFfsObj != None:
|
||||
if LastFfsObj is not None:
|
||||
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
|
||||
else:
|
||||
if FfsId in self.FfsDict:
|
||||
|
@ -480,7 +480,7 @@ class FirmwareVolume(Image):
|
|||
% (FfsObj.Guid, FfsObj.Offset,
|
||||
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
|
||||
self.FfsDict[FfsId] = FfsObj
|
||||
if LastFfsObj != None:
|
||||
if LastFfsObj is not None:
|
||||
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
|
||||
|
||||
FfsStartAddress += len(FfsObj)
|
||||
|
@ -527,11 +527,11 @@ class CompressedImage(Image):
|
|||
|
||||
def __init__(m, CompressedData=None, CompressionType=None, UncompressedLength=None):
|
||||
Image.__init__(m)
|
||||
if UncompressedLength != None:
|
||||
if UncompressedLength is not None:
|
||||
m.UncompressedLength = UncompressedLength
|
||||
if CompressionType != None:
|
||||
if CompressionType is not None:
|
||||
m.CompressionType = CompressionType
|
||||
if CompressedData != None:
|
||||
if CompressedData is not None:
|
||||
m.Data = CompressedData
|
||||
|
||||
def __str__(m):
|
||||
|
@ -607,13 +607,13 @@ class GuidDefinedImage(Image):
|
|||
|
||||
def __init__(m, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
|
||||
Image.__init__(m)
|
||||
if SectionDefinitionGuid != None:
|
||||
if SectionDefinitionGuid is not None:
|
||||
m.SectionDefinitionGuid = SectionDefinitionGuid
|
||||
if DataOffset != None:
|
||||
if DataOffset is not None:
|
||||
m.DataOffset = DataOffset
|
||||
if Attributes != None:
|
||||
if Attributes is not None:
|
||||
m.Attributes = Attributes
|
||||
if Data != None:
|
||||
if Data is not None:
|
||||
m.Data = Data
|
||||
|
||||
def __str__(m):
|
||||
|
@ -791,7 +791,7 @@ class Depex(Image):
|
|||
else:
|
||||
CurrentData = m._OPCODE_
|
||||
m._ExprList.append(Token)
|
||||
if CurrentData == None:
|
||||
if CurrentData is None:
|
||||
break
|
||||
return m._ExprList
|
||||
|
||||
|
@ -867,9 +867,9 @@ class Section(Image):
|
|||
def __init__(m, Type=None, Size=None):
|
||||
Image.__init__(m)
|
||||
m._Alignment = 1
|
||||
if Type != None:
|
||||
if Type is not None:
|
||||
m.Type = Type
|
||||
if Size != None:
|
||||
if Size is not None:
|
||||
m.Size = Size
|
||||
|
||||
def __str__(m):
|
||||
|
@ -1283,7 +1283,7 @@ class LinkMap:
|
|||
for Line in MapFile:
|
||||
Line = Line.strip()
|
||||
if not MappingStart:
|
||||
if MappingTitle.match(Line) != None:
|
||||
if MappingTitle.match(Line) is not None:
|
||||
MappingStart = True
|
||||
continue
|
||||
ResultList = MappingFormat.findall(Line)
|
||||
|
|
|
@ -52,7 +52,7 @@ class EdkInfParser(object):
|
|||
self.SourceOverridePath = SourceOverridePath
|
||||
|
||||
# Load Inf file if filename is not None
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
self.LoadInfFile(Filename)
|
||||
|
||||
if SourceFileList:
|
||||
|
|
|
@ -234,7 +234,7 @@ class Report(object):
|
|||
#
|
||||
def GenerateFfs(self, FfsObj):
|
||||
self.FfsIndex = self.FfsIndex + 1
|
||||
if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
|
||||
if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
|
||||
FfsGuid = FfsObj.Guid
|
||||
FfsOffset = FfsObj._OFF_
|
||||
FfsName = 'Unknown-Module'
|
||||
|
|
|
@ -75,11 +75,11 @@ class AprioriSection (AprioriSectionClassObject):
|
|||
InfFileName = NormPath(FfsObj.InfFileName)
|
||||
Arch = FfsObj.GetCurrentArch()
|
||||
|
||||
if Arch != None:
|
||||
if Arch is not None:
|
||||
Dict['$(ARCH)'] = Arch
|
||||
InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)
|
||||
|
||||
if Arch != None:
|
||||
if Arch is not None:
|
||||
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
|
||||
Guid = Inf.Guid
|
||||
|
||||
|
|
|
@ -159,7 +159,7 @@ class Capsule (CapsuleClassObject) :
|
|||
if not os.path.isabs(fmp.ImageFile):
|
||||
CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)
|
||||
CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'
|
||||
if ExternalTool == None:
|
||||
if ExternalTool is None:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)
|
||||
else:
|
||||
CmdOption += ExternalTool
|
||||
|
|
|
@ -55,7 +55,7 @@ class CompressSection (CompressSectionClassObject) :
|
|||
#
|
||||
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
|
||||
|
||||
if FfsInf != None:
|
||||
if FfsInf is not None:
|
||||
self.CompType = FfsInf.__ExtendMacro__(self.CompType)
|
||||
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
|
||||
|
||||
|
@ -67,13 +67,13 @@ class CompressSection (CompressSectionClassObject) :
|
|||
Index = Index + 1
|
||||
SecIndex = '%s.%d' %(SecNum, Index)
|
||||
ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
|
||||
if AlignValue != None:
|
||||
if MaxAlign == None:
|
||||
if AlignValue is not None:
|
||||
if MaxAlign is None:
|
||||
MaxAlign = AlignValue
|
||||
if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
|
||||
MaxAlign = AlignValue
|
||||
if ReturnSectList != []:
|
||||
if AlignValue == None:
|
||||
if AlignValue is None:
|
||||
AlignValue = "1"
|
||||
for FileData in ReturnSectList:
|
||||
SectFiles += (FileData,)
|
||||
|
|
|
@ -52,7 +52,7 @@ class DataSection (DataSectionClassObject):
|
|||
#
|
||||
# Prepare the parameter of GenSection
|
||||
#
|
||||
if FfsFile != None:
|
||||
if FfsFile is not None:
|
||||
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
|
||||
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
|
||||
else:
|
||||
|
@ -92,7 +92,7 @@ class DataSection (DataSectionClassObject):
|
|||
|
||||
NoStrip = True
|
||||
if self.SecType in ('TE', 'PE32'):
|
||||
if self.KeepReloc != None:
|
||||
if self.KeepReloc is not None:
|
||||
NoStrip = self.KeepReloc
|
||||
|
||||
if not NoStrip:
|
||||
|
|
|
@ -86,7 +86,7 @@ class DepexSection (DepexSectionClassObject):
|
|||
for Exp in ExpList:
|
||||
if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):
|
||||
GuidStr = self.__FindGuidValue(Exp)
|
||||
if GuidStr == None:
|
||||
if GuidStr is None:
|
||||
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
|
||||
"Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))
|
||||
|
||||
|
|
|
@ -55,10 +55,10 @@ class EfiSection (EfiSectionClassObject):
|
|||
#
|
||||
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :
|
||||
|
||||
if self.FileName != None and self.FileName.startswith('PCD('):
|
||||
if self.FileName is not None and self.FileName.startswith('PCD('):
|
||||
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
|
||||
"""Prepare the parameter of GenSection"""
|
||||
if FfsInf != None :
|
||||
if FfsInf is not None :
|
||||
InfFileName = FfsInf.InfFileName
|
||||
SectionType = FfsInf.__ExtendMacro__(self.SectionType)
|
||||
Filename = FfsInf.__ExtendMacro__(self.FileName)
|
||||
|
@ -66,20 +66,20 @@ class EfiSection (EfiSectionClassObject):
|
|||
StringData = FfsInf.__ExtendMacro__(self.StringData)
|
||||
NoStrip = True
|
||||
if FfsInf.ModuleType in ('SEC', 'PEI_CORE', 'PEIM') and SectionType in ('TE', 'PE32'):
|
||||
if FfsInf.KeepReloc != None:
|
||||
if FfsInf.KeepReloc is not None:
|
||||
NoStrip = FfsInf.KeepReloc
|
||||
elif FfsInf.KeepRelocFromRule != None:
|
||||
elif FfsInf.KeepRelocFromRule is not None:
|
||||
NoStrip = FfsInf.KeepRelocFromRule
|
||||
elif self.KeepReloc != None:
|
||||
elif self.KeepReloc is not None:
|
||||
NoStrip = self.KeepReloc
|
||||
elif FfsInf.ShadowFromInfFile != None:
|
||||
elif FfsInf.ShadowFromInfFile is not None:
|
||||
NoStrip = FfsInf.ShadowFromInfFile
|
||||
else:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" %ModuleName)
|
||||
|
||||
"""If the file name was pointed out, add it in FileList"""
|
||||
FileList = []
|
||||
if Filename != None:
|
||||
if Filename is not None:
|
||||
Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
|
||||
# check if the path is absolute or relative
|
||||
if os.path.isabs(Filename):
|
||||
|
@ -107,14 +107,14 @@ class EfiSection (EfiSectionClassObject):
|
|||
if SectionType == 'VERSION':
|
||||
|
||||
InfOverrideVerString = False
|
||||
if FfsInf.Version != None:
|
||||
if FfsInf.Version is not None:
|
||||
#StringData = FfsInf.Version
|
||||
BuildNum = FfsInf.Version
|
||||
InfOverrideVerString = True
|
||||
|
||||
if InfOverrideVerString:
|
||||
#VerTuple = ('-n', '"' + StringData + '"')
|
||||
if BuildNum != None and BuildNum != '':
|
||||
if BuildNum is not None and BuildNum != '':
|
||||
BuildNumTuple = ('-j', BuildNum)
|
||||
else:
|
||||
BuildNumTuple = tuple()
|
||||
|
@ -136,7 +136,7 @@ class EfiSection (EfiSectionClassObject):
|
|||
VerString = f.read()
|
||||
f.close()
|
||||
BuildNum = VerString
|
||||
if BuildNum != None and BuildNum != '':
|
||||
if BuildNum is not None and BuildNum != '':
|
||||
BuildNumTuple = ('-j', BuildNum)
|
||||
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
|
||||
#Ui=VerString,
|
||||
|
@ -146,7 +146,7 @@ class EfiSection (EfiSectionClassObject):
|
|||
|
||||
else:
|
||||
BuildNum = StringData
|
||||
if BuildNum != None and BuildNum != '':
|
||||
if BuildNum is not None and BuildNum != '':
|
||||
BuildNumTuple = ('-j', BuildNum)
|
||||
else:
|
||||
BuildNumTuple = tuple()
|
||||
|
@ -173,7 +173,7 @@ class EfiSection (EfiSectionClassObject):
|
|||
elif SectionType == 'UI':
|
||||
|
||||
InfOverrideUiString = False
|
||||
if FfsInf.Ui != None:
|
||||
if FfsInf.Ui is not None:
|
||||
StringData = FfsInf.Ui
|
||||
InfOverrideUiString = True
|
||||
|
||||
|
@ -196,7 +196,7 @@ class EfiSection (EfiSectionClassObject):
|
|||
Ui=UiString, IsMakefile=IsMakefile)
|
||||
OutputFileList.append(OutputFile)
|
||||
else:
|
||||
if StringData != None and len(StringData) > 0:
|
||||
if StringData is not None and len(StringData) > 0:
|
||||
UiTuple = ('-n', '"' + StringData + '"')
|
||||
else:
|
||||
UiTuple = tuple()
|
||||
|
|
|
@ -639,7 +639,7 @@ class FdfParser:
|
|||
if not MacroVal:
|
||||
if Macro in MacroDict:
|
||||
MacroVal = MacroDict[Macro]
|
||||
if MacroVal != None:
|
||||
if MacroVal is not None:
|
||||
IncFileName = IncFileName.replace('$(' + Macro + ')', MacroVal, 1)
|
||||
if MacroVal.find('$(') != -1:
|
||||
PreIndex = StartPos
|
||||
|
@ -687,7 +687,7 @@ class FdfParser:
|
|||
# list index of the insertion, note that line number is 'CurrentLine + 1'
|
||||
InsertAtLine = CurrentLine
|
||||
ParentProfile = GetParentAtLine (CurrentLine)
|
||||
if ParentProfile != None:
|
||||
if ParentProfile is not None:
|
||||
ParentProfile.IncludeFileList.insert(0, IncFileProfile)
|
||||
IncFileProfile.Level = ParentProfile.Level + 1
|
||||
IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
|
||||
|
@ -763,7 +763,7 @@ class FdfParser:
|
|||
while StartPos != -1 and EndPos != -1 and self.__Token not in ['!ifdef', '!ifndef', '!if', '!elseif']:
|
||||
MacroName = CurLine[StartPos+2 : EndPos]
|
||||
MacorValue = self.__GetMacroValue(MacroName)
|
||||
if MacorValue != None:
|
||||
if MacorValue is not None:
|
||||
CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)
|
||||
if MacorValue.find('$(') != -1:
|
||||
PreIndex = StartPos
|
||||
|
@ -1136,7 +1136,7 @@ class FdfParser:
|
|||
|
||||
if not self.__GetNextToken():
|
||||
return False
|
||||
if gGuidPattern.match(self.__Token) != None:
|
||||
if gGuidPattern.match(self.__Token) is not None:
|
||||
return True
|
||||
else:
|
||||
self.__UndoToken()
|
||||
|
@ -1412,7 +1412,7 @@ class FdfParser:
|
|||
#'\n\tGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
|
||||
# At this point, the closest parent would be the included file itself
|
||||
Profile = GetParentAtLine(X.OriginalLineNumber)
|
||||
if Profile != None:
|
||||
if Profile is not None:
|
||||
X.Message += ' near line %d, column %d: %s' \
|
||||
% (X.LineNumber, 0, Profile.FileLinesList[X.LineNumber-1])
|
||||
else:
|
||||
|
@ -1540,7 +1540,7 @@ class FdfParser:
|
|||
while self.__GetTokenStatements(FdObj):
|
||||
pass
|
||||
for Attr in ("BaseAddress", "Size", "ErasePolarity"):
|
||||
if getattr(FdObj, Attr) == None:
|
||||
if getattr(FdObj, Attr) is None:
|
||||
self.__GetNextToken()
|
||||
raise Warning("Keyword %s missing" % Attr, self.FileName, self.CurrentLineNumber)
|
||||
|
||||
|
@ -1695,7 +1695,7 @@ class FdfParser:
|
|||
IsBlock = True
|
||||
|
||||
Item = Obj.BlockSizeList[-1]
|
||||
if Item[0] == None or Item[1] == None:
|
||||
if Item[0] is None or Item[1] is None:
|
||||
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
|
||||
return IsBlock
|
||||
|
||||
|
@ -1863,7 +1863,7 @@ class FdfParser:
|
|||
#
|
||||
def __GetRegionLayout(self, Fd):
|
||||
Offset = self.__CalcRegionExpr()
|
||||
if Offset == None:
|
||||
if Offset is None:
|
||||
return False
|
||||
|
||||
RegionObj = Region.Region()
|
||||
|
@ -1874,7 +1874,7 @@ class FdfParser:
|
|||
raise Warning("expected '|'", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
Size = self.__CalcRegionExpr()
|
||||
if Size == None:
|
||||
if Size is None:
|
||||
raise Warning("expected Region Size", self.FileName, self.CurrentLineNumber)
|
||||
RegionObj.Size = Size
|
||||
|
||||
|
@ -2974,7 +2974,7 @@ class FdfParser:
|
|||
|
||||
FvImageSectionObj = FvImageSection.FvImageSection()
|
||||
FvImageSectionObj.Alignment = AlignValue
|
||||
if FvObj != None:
|
||||
if FvObj is not None:
|
||||
FvImageSectionObj.Fv = FvObj
|
||||
FvImageSectionObj.FvName = None
|
||||
else:
|
||||
|
@ -3791,7 +3791,7 @@ class FdfParser:
|
|||
Rule.CheckSum = CheckSum
|
||||
Rule.Fixed = Fixed
|
||||
Rule.KeyStringList = KeyStringList
|
||||
if KeepReloc != None:
|
||||
if KeepReloc is not None:
|
||||
Rule.KeepReloc = KeepReloc
|
||||
|
||||
while True:
|
||||
|
@ -3847,7 +3847,7 @@ class FdfParser:
|
|||
Rule.CheckSum = CheckSum
|
||||
Rule.Fixed = Fixed
|
||||
Rule.KeyStringList = KeyStringList
|
||||
if KeepReloc != None:
|
||||
if KeepReloc is not None:
|
||||
Rule.KeepReloc = KeepReloc
|
||||
Rule.FileExtension = Ext
|
||||
Rule.FileName = self.__Token
|
||||
|
@ -3986,7 +3986,7 @@ class FdfParser:
|
|||
EfiSectionObj.KeepReloc = False
|
||||
else:
|
||||
EfiSectionObj.KeepReloc = True
|
||||
if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
|
||||
if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
|
||||
raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
|
||||
else:
|
||||
raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
|
||||
|
@ -4313,7 +4313,7 @@ class FdfParser:
|
|||
raise Warning("expected Component version", self.FileName, self.CurrentLineNumber)
|
||||
|
||||
Pattern = re.compile('-$|[0-9a-fA-F]{1,2}\.[0-9a-fA-F]{1,2}$', re.DOTALL)
|
||||
if Pattern.match(self.__Token) == None:
|
||||
if Pattern.match(self.__Token) is None:
|
||||
raise Warning("Unknown version format '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
|
||||
CompStatementObj.CompVer = self.__Token
|
||||
|
||||
|
@ -4577,7 +4577,7 @@ class FdfParser:
|
|||
for elementRegionData in elementRegion.RegionDataList:
|
||||
if elementRegionData.endswith(".cap"):
|
||||
continue
|
||||
if elementRegionData != None and elementRegionData.upper() not in CapList:
|
||||
if elementRegionData is not None and elementRegionData.upper() not in CapList:
|
||||
CapList.append(elementRegionData.upper())
|
||||
return CapList
|
||||
|
||||
|
@ -4593,15 +4593,15 @@ class FdfParser:
|
|||
def __GetReferencedFdCapTuple(self, CapObj, RefFdList = [], RefFvList = []):
|
||||
|
||||
for CapsuleDataObj in CapObj.CapsuleDataList :
|
||||
if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName != None and CapsuleDataObj.FvName.upper() not in RefFvList:
|
||||
if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
|
||||
RefFvList.append (CapsuleDataObj.FvName.upper())
|
||||
elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName != None and CapsuleDataObj.FdName.upper() not in RefFdList:
|
||||
elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
|
||||
RefFdList.append (CapsuleDataObj.FdName.upper())
|
||||
elif CapsuleDataObj.Ffs != None:
|
||||
elif CapsuleDataObj.Ffs is not None:
|
||||
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
|
||||
if CapsuleDataObj.Ffs.FvName != None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
|
||||
if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
|
||||
RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())
|
||||
elif CapsuleDataObj.Ffs.FdName != None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
|
||||
elif CapsuleDataObj.Ffs.FdName is not None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
|
||||
RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())
|
||||
else:
|
||||
self.__GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)
|
||||
|
@ -4624,7 +4624,7 @@ class FdfParser:
|
|||
for elementRegionData in elementRegion.RegionDataList:
|
||||
if elementRegionData.endswith(".fv"):
|
||||
continue
|
||||
if elementRegionData != None and elementRegionData.upper() not in FvList:
|
||||
if elementRegionData is not None and elementRegionData.upper() not in FvList:
|
||||
FvList.append(elementRegionData.upper())
|
||||
return FvList
|
||||
|
||||
|
@ -4641,9 +4641,9 @@ class FdfParser:
|
|||
|
||||
for FfsObj in FvObj.FfsList:
|
||||
if isinstance(FfsObj, FfsFileStatement.FileStatement):
|
||||
if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
|
||||
if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
|
||||
RefFvList.append(FfsObj.FvName.upper())
|
||||
elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
|
||||
elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
|
||||
RefFdList.append(FfsObj.FdName.upper())
|
||||
else:
|
||||
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
|
||||
|
@ -4664,9 +4664,9 @@ class FdfParser:
|
|||
while SectionStack != []:
|
||||
SectionObj = SectionStack.pop()
|
||||
if isinstance(SectionObj, FvImageSection.FvImageSection):
|
||||
if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
|
||||
if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
|
||||
FvList.append(SectionObj.FvName.upper())
|
||||
if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
|
||||
if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
|
||||
FvList.append(SectionObj.Fv.UiFvName.upper())
|
||||
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
|
||||
|
||||
|
|
|
@ -59,7 +59,7 @@ class FileStatement (FileStatementClassObject) :
|
|||
#
|
||||
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):
|
||||
|
||||
if self.NameGuid != None and self.NameGuid.startswith('PCD('):
|
||||
if self.NameGuid is not None and self.NameGuid.startswith('PCD('):
|
||||
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
|
||||
if len(PcdValue) == 0:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
|
||||
|
@ -81,7 +81,7 @@ class FileStatement (FileStatementClassObject) :
|
|||
|
||||
Dict.update(self.DefineVarDict)
|
||||
SectionAlignments = None
|
||||
if self.FvName != None :
|
||||
if self.FvName is not None :
|
||||
Buffer = StringIO.StringIO('')
|
||||
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
|
||||
|
@ -89,14 +89,14 @@ class FileStatement (FileStatementClassObject) :
|
|||
FileName = Fv.AddToBuffer(Buffer)
|
||||
SectionFiles = [FileName]
|
||||
|
||||
elif self.FdName != None:
|
||||
elif self.FdName is not None:
|
||||
if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
|
||||
Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
|
||||
FileName = Fd.GenFd()
|
||||
SectionFiles = [FileName]
|
||||
|
||||
elif self.FileName != None:
|
||||
elif self.FileName is not None:
|
||||
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
|
||||
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
|
||||
FileContent = ''
|
||||
|
@ -110,7 +110,7 @@ class FileStatement (FileStatementClassObject) :
|
|||
Content = f.read()
|
||||
f.close()
|
||||
AlignValue = 1
|
||||
if self.SubAlignment[Index] != None:
|
||||
if self.SubAlignment[Index] is not None:
|
||||
AlignValue = GenFdsGlobalVariable.GetAlignment(self.SubAlignment[Index])
|
||||
if AlignValue > MaxAlignValue:
|
||||
MaxAlignIndex = Index
|
||||
|
@ -151,7 +151,7 @@ class FileStatement (FileStatementClassObject) :
|
|||
section.FvAddr = FvChildAddr.pop(0)
|
||||
elif isinstance(section, GuidSection):
|
||||
section.FvAddr = FvChildAddr
|
||||
if FvParentAddr != None and isinstance(section, GuidSection):
|
||||
if FvParentAddr is not None and isinstance(section, GuidSection):
|
||||
section.FvParentAddr = FvParentAddr
|
||||
|
||||
if self.KeepReloc == False:
|
||||
|
|
|
@ -185,7 +185,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
InfLowerPath = str(PathClassObj).lower()
|
||||
if self.OverrideGuid:
|
||||
PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
|
||||
if self.CurrentArch != None:
|
||||
if self.CurrentArch is not None:
|
||||
|
||||
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
|
||||
#
|
||||
|
@ -194,14 +194,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
self.BaseName = Inf.BaseName
|
||||
self.ModuleGuid = Inf.Guid
|
||||
self.ModuleType = Inf.ModuleType
|
||||
if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
|
||||
if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
|
||||
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
|
||||
if Inf.AutoGenVersion < 0x00010005:
|
||||
self.ModuleType = Inf.ComponentType
|
||||
self.VersionString = Inf.Version
|
||||
self.BinFileList = Inf.Binaries
|
||||
self.SourceFileList = Inf.Sources
|
||||
if self.KeepReloc == None and Inf.Shadow:
|
||||
if self.KeepReloc is None and Inf.Shadow:
|
||||
self.ShadowFromInfFile = Inf.Shadow
|
||||
|
||||
else:
|
||||
|
@ -209,7 +209,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
self.BaseName = Inf.BaseName
|
||||
self.ModuleGuid = Inf.Guid
|
||||
self.ModuleType = Inf.ModuleType
|
||||
if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
|
||||
if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
|
||||
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
|
||||
self.VersionString = Inf.Version
|
||||
self.BinFileList = Inf.Binaries
|
||||
|
@ -231,7 +231,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
if self.ModuleType == 'MM_CORE_STANDALONE' and int(self.PiSpecVersion, 16) < 0x00010032:
|
||||
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
|
||||
|
||||
if Inf._Defs != None and len(Inf._Defs) > 0:
|
||||
if Inf._Defs is not None and len(Inf._Defs) > 0:
|
||||
self.OptRomDefs.update(Inf._Defs)
|
||||
|
||||
self.PatchPcds = []
|
||||
|
@ -476,7 +476,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
# Allow binary type module not specify override rule in FDF file.
|
||||
#
|
||||
if len(self.BinFileList) > 0:
|
||||
if self.Rule == None or self.Rule == "":
|
||||
if self.Rule is None or self.Rule == "":
|
||||
self.Rule = "BINARY"
|
||||
|
||||
if not IsMakefile and GenFdsGlobalVariable.EnableGenfdsMultiThread and self.Rule != 'BINARY':
|
||||
|
@ -545,7 +545,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
#
|
||||
def __GetRule__ (self) :
|
||||
CurrentArchList = []
|
||||
if self.CurrentArch == None:
|
||||
if self.CurrentArch is None:
|
||||
CurrentArchList = ['common']
|
||||
else:
|
||||
CurrentArchList.append(self.CurrentArch)
|
||||
|
@ -556,13 +556,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
CurrentArch.upper() + \
|
||||
'.' + \
|
||||
self.ModuleType.upper()
|
||||
if self.Rule != None:
|
||||
if self.Rule is not None:
|
||||
RuleName = RuleName + \
|
||||
'.' + \
|
||||
self.Rule.upper()
|
||||
|
||||
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
|
||||
if Rule != None:
|
||||
if Rule is not None:
|
||||
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
|
||||
return Rule
|
||||
|
||||
|
@ -572,7 +572,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
'.' + \
|
||||
self.ModuleType.upper()
|
||||
|
||||
if self.Rule != None:
|
||||
if self.Rule is not None:
|
||||
RuleName = RuleName + \
|
||||
'.' + \
|
||||
self.Rule.upper()
|
||||
|
@ -580,11 +580,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
|
||||
|
||||
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
|
||||
if Rule != None:
|
||||
if Rule is not None:
|
||||
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
|
||||
return Rule
|
||||
|
||||
if Rule == None :
|
||||
if Rule is None :
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, 'Don\'t Find common rule %s for INF %s' \
|
||||
% (RuleName, self.InfFileName))
|
||||
|
||||
|
@ -601,7 +601,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
DscArchList = []
|
||||
for Arch in GenFdsGlobalVariable.ArchList :
|
||||
PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
|
||||
if PlatformDataBase != None:
|
||||
if PlatformDataBase is not None:
|
||||
if InfFileKey in PlatformDataBase.Modules:
|
||||
DscArchList.append (Arch)
|
||||
else:
|
||||
|
@ -648,7 +648,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
ArchList = CurArchList
|
||||
|
||||
UseArchList = TargetArchList
|
||||
if self.UseArch != None:
|
||||
if self.UseArch is not None:
|
||||
UseArchList = []
|
||||
UseArchList.append(self.UseArch)
|
||||
ArchList = list(set (UseArchList) & set (ArchList))
|
||||
|
@ -689,7 +689,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
if self.OverrideGuid:
|
||||
FileName = self.OverrideGuid
|
||||
Arch = "NoneArch"
|
||||
if self.CurrentArch != None:
|
||||
if self.CurrentArch is not None:
|
||||
Arch = self.CurrentArch
|
||||
|
||||
OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
|
||||
|
@ -723,7 +723,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
FileList = []
|
||||
OutputFileList = []
|
||||
GenSecInputFile = None
|
||||
if Rule.FileName != None:
|
||||
if Rule.FileName is not None:
|
||||
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
|
||||
if os.path.isabs(GenSecInputFile):
|
||||
GenSecInputFile = os.path.normpath(GenSecInputFile)
|
||||
|
@ -748,11 +748,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
|
||||
NoStrip = True
|
||||
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
|
||||
if self.KeepReloc != None:
|
||||
if self.KeepReloc is not None:
|
||||
NoStrip = self.KeepReloc
|
||||
elif Rule.KeepReloc != None:
|
||||
elif Rule.KeepReloc is not None:
|
||||
NoStrip = Rule.KeepReloc
|
||||
elif self.ShadowFromInfFile != None:
|
||||
elif self.ShadowFromInfFile is not None:
|
||||
NoStrip = self.ShadowFromInfFile
|
||||
|
||||
if FileList != [] :
|
||||
|
@ -868,7 +868,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
InputSection.append(InputFile)
|
||||
SectionAlignments.append(Rule.SectAlignment)
|
||||
|
||||
if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
|
||||
if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
|
||||
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
|
||||
if len(PcdValue) == 0:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
|
||||
|
@ -902,7 +902,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
#
|
||||
def __GenComplexFileSection__(self, Rule, FvChildAddr, FvParentAddr, IsMakefile = False):
|
||||
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
|
||||
if Rule.KeepReloc != None:
|
||||
if Rule.KeepReloc is not None:
|
||||
self.KeepRelocFromRule = Rule.KeepReloc
|
||||
SectFiles = []
|
||||
SectAlignments = []
|
||||
|
@ -957,7 +957,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
Sect.FvAddr = FvChildAddr.pop(0)
|
||||
elif isinstance(Sect, GuidSection):
|
||||
Sect.FvAddr = FvChildAddr
|
||||
if FvParentAddr != None and isinstance(Sect, GuidSection):
|
||||
if FvParentAddr is not None and isinstance(Sect, GuidSection):
|
||||
Sect.FvParentAddr = FvParentAddr
|
||||
|
||||
if Rule.KeyStringList != []:
|
||||
|
@ -1040,7 +1040,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
#
|
||||
def __GenComplexFileFfs__(self, Rule, InputFile, Alignments, MakefilePath = None):
|
||||
|
||||
if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
|
||||
if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
|
||||
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
|
||||
if len(PcdValue) == 0:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
|
||||
|
@ -1079,7 +1079,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
|
|||
if Rule.CheckSum != False:
|
||||
result += ('-s',)
|
||||
|
||||
if Rule.Alignment != None and Rule.Alignment != '':
|
||||
if Rule.Alignment is not None and Rule.Alignment != '':
|
||||
result += ('-a', Rule.Alignment)
|
||||
|
||||
return result
|
||||
|
|
|
@ -70,14 +70,14 @@ class FV (FvClassObject):
|
|||
#
|
||||
def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict = {}, Flag=False) :
|
||||
|
||||
if BaseAddress == None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
|
||||
if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
|
||||
return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
|
||||
|
||||
#
|
||||
# Check whether FV in Capsule is in FD flash region.
|
||||
# If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
|
||||
#
|
||||
if self.CapsuleName != None:
|
||||
if self.CapsuleName is not None:
|
||||
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
|
||||
for RegionObj in FdObj.RegionList:
|
||||
|
@ -94,7 +94,7 @@ class FV (FvClassObject):
|
|||
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
|
||||
FFSGuid = None
|
||||
|
||||
if self.FvBaseAddress != None:
|
||||
if self.FvBaseAddress is not None:
|
||||
BaseAddress = self.FvBaseAddress
|
||||
if not Flag:
|
||||
self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
|
||||
|
@ -136,7 +136,7 @@ class FV (FvClassObject):
|
|||
FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
|
||||
FvOutputFile = FvOutputFile + '.Fv'
|
||||
# BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
|
||||
if self.CreateFileName != None:
|
||||
if self.CreateFileName is not None:
|
||||
FvOutputFile = self.CreateFileName
|
||||
|
||||
if Flag:
|
||||
|
@ -163,7 +163,7 @@ class FV (FvClassObject):
|
|||
NewFvInfo = None
|
||||
if os.path.exists (FvInfoFileName):
|
||||
NewFvInfo = open(FvInfoFileName, 'r').read()
|
||||
if NewFvInfo != None and NewFvInfo != OrigFvInfo:
|
||||
if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
|
||||
FvChildAddr = []
|
||||
AddFileObj = open(FvInfoFileName, 'r')
|
||||
AddrStrings = AddFileObj.readlines()
|
||||
|
@ -273,16 +273,16 @@ class FV (FvClassObject):
|
|||
# Add [Options]
|
||||
#
|
||||
self.FvInfFile.writelines("[options]" + T_CHAR_LF)
|
||||
if BaseAddress != None :
|
||||
if BaseAddress is not None :
|
||||
self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
|
||||
BaseAddress + \
|
||||
T_CHAR_LF)
|
||||
|
||||
if BlockSize != None:
|
||||
if BlockSize is not None:
|
||||
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
|
||||
'0x%X' %BlockSize + \
|
||||
T_CHAR_LF)
|
||||
if BlockNum != None:
|
||||
if BlockNum is not None:
|
||||
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
|
||||
' 0x%X' %BlockNum + \
|
||||
T_CHAR_LF)
|
||||
|
@ -293,20 +293,20 @@ class FV (FvClassObject):
|
|||
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + T_CHAR_LF)
|
||||
|
||||
for BlockSize in self.BlockSizeList :
|
||||
if BlockSize[0] != None:
|
||||
if BlockSize[0] is not None:
|
||||
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
|
||||
'0x%X' %BlockSize[0] + \
|
||||
T_CHAR_LF)
|
||||
|
||||
if BlockSize[1] != None:
|
||||
if BlockSize[1] is not None:
|
||||
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
|
||||
' 0x%X' %BlockSize[1] + \
|
||||
T_CHAR_LF)
|
||||
|
||||
if self.BsBaseAddress != None:
|
||||
if self.BsBaseAddress is not None:
|
||||
self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
|
||||
'0x%X' %self.BsBaseAddress)
|
||||
if self.RtBaseAddress != None:
|
||||
if self.RtBaseAddress is not None:
|
||||
self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
|
||||
'0x%X' %self.RtBaseAddress)
|
||||
#
|
||||
|
@ -317,7 +317,7 @@ class FV (FvClassObject):
|
|||
self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \
|
||||
' %s' %ErasePloarity + \
|
||||
T_CHAR_LF)
|
||||
if not (self.FvAttributeDict == None):
|
||||
if not (self.FvAttributeDict is None):
|
||||
for FvAttribute in self.FvAttributeDict.keys() :
|
||||
if FvAttribute == "FvUsedSizeEnable":
|
||||
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1') :
|
||||
|
@ -328,7 +328,7 @@ class FV (FvClassObject):
|
|||
' = ' + \
|
||||
self.FvAttributeDict[FvAttribute] + \
|
||||
T_CHAR_LF )
|
||||
if self.FvAlignment != None:
|
||||
if self.FvAlignment is not None:
|
||||
self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \
|
||||
self.FvAlignment.strip() + \
|
||||
" = TRUE" + \
|
||||
|
@ -337,7 +337,7 @@ class FV (FvClassObject):
|
|||
#
|
||||
# Generate FV extension header file
|
||||
#
|
||||
if self.FvNameGuid == None or self.FvNameGuid == '':
|
||||
if self.FvNameGuid is None or self.FvNameGuid == '':
|
||||
if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
|
||||
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
|
||||
|
||||
|
@ -442,7 +442,7 @@ class FV (FvClassObject):
|
|||
# Add [Files]
|
||||
#
|
||||
self.FvInfFile.writelines("[files]" + T_CHAR_LF)
|
||||
if VtfDict != None and self.UiFvName in VtfDict.keys():
|
||||
if VtfDict is not None and self.UiFvName in VtfDict.keys():
|
||||
self.FvInfFile.writelines("EFI_FILE_NAME = " + \
|
||||
VtfDict.get(self.UiFvName) + \
|
||||
T_CHAR_LF)
|
||||
|
|
|
@ -53,7 +53,7 @@ class FvImageSection(FvImageSectionClassObject):
|
|||
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
|
||||
|
||||
OutputFileList = []
|
||||
if self.FvFileType != None:
|
||||
if self.FvFileType is not None:
|
||||
FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
|
||||
if IsSect :
|
||||
return FileList, self.Alignment
|
||||
|
@ -96,20 +96,20 @@ class FvImageSection(FvImageSectionClassObject):
|
|||
#
|
||||
# Generate Fv
|
||||
#
|
||||
if self.FvName != None:
|
||||
if self.FvName is not None:
|
||||
Buffer = StringIO.StringIO('')
|
||||
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
|
||||
if Fv != None:
|
||||
if Fv is not None:
|
||||
self.Fv = Fv
|
||||
FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict = Dict, Flag=IsMakefile)
|
||||
if Fv.FvAlignment != None:
|
||||
if self.Alignment == None:
|
||||
if Fv.FvAlignment is not None:
|
||||
if self.Alignment is None:
|
||||
self.Alignment = Fv.FvAlignment
|
||||
else:
|
||||
if GenFdsGlobalVariable.GetAlignment (Fv.FvAlignment) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
|
||||
self.Alignment = Fv.FvAlignment
|
||||
else:
|
||||
if self.FvFileName != None:
|
||||
if self.FvFileName is not None:
|
||||
FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
|
||||
if os.path.isfile(FvFileName):
|
||||
FvFileObj = open (FvFileName,'rb')
|
||||
|
|
|
@ -69,22 +69,22 @@ def main():
|
|||
|
||||
EdkLogger.Initialize()
|
||||
try:
|
||||
if Options.verbose != None:
|
||||
if Options.verbose is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.VERBOSE)
|
||||
GenFdsGlobalVariable.VerboseMode = True
|
||||
|
||||
if Options.FixedAddress != None:
|
||||
if Options.FixedAddress is not None:
|
||||
GenFdsGlobalVariable.FixedLoadAddress = True
|
||||
|
||||
if Options.quiet != None:
|
||||
if Options.quiet is not None:
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
if Options.debug != None:
|
||||
if Options.debug is not None:
|
||||
EdkLogger.SetLevel(Options.debug + 1)
|
||||
GenFdsGlobalVariable.DebugLevel = Options.debug
|
||||
else:
|
||||
EdkLogger.SetLevel(EdkLogger.INFO)
|
||||
|
||||
if (Options.Workspace == None):
|
||||
if (Options.Workspace is None):
|
||||
EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
|
||||
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
|
||||
elif not os.path.exists(Options.Workspace):
|
||||
|
@ -179,7 +179,7 @@ def main():
|
|||
# if no tool chain given in command line, get it from target.txt
|
||||
if not GenFdsGlobalVariable.ToolChainTag:
|
||||
ToolChainList = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
|
||||
if ToolChainList == None or len(ToolChainList) == 0:
|
||||
if ToolChainList is None or len(ToolChainList) == 0:
|
||||
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
|
||||
if len(ToolChainList) != 1:
|
||||
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
|
||||
|
@ -300,7 +300,7 @@ def main():
|
|||
"No such a Capsule in FDF file: %s" % Options.uiCapName)
|
||||
|
||||
GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
|
||||
if ArchList != None:
|
||||
if ArchList is not None:
|
||||
GenFdsGlobalVariable.ArchList = ArchList
|
||||
|
||||
# Dsc Build Data will handle Pcd Settings from CommandLine.
|
||||
|
@ -340,7 +340,7 @@ def main():
|
|||
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
|
||||
ReturnCode = FORMAT_INVALID
|
||||
except FatalError, X:
|
||||
if Options.debug != None:
|
||||
if Options.debug is not None:
|
||||
import traceback
|
||||
EdkLogger.quiet(traceback.format_exc())
|
||||
ReturnCode = X.args[0]
|
||||
|
@ -378,7 +378,7 @@ def SingleCheckCallback(option, opt_str, value, parser):
|
|||
def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
|
||||
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
|
||||
# if user not specify filter, try to deduce it from global data.
|
||||
if KeyStringList == None or KeyStringList == []:
|
||||
if KeyStringList is None or KeyStringList == []:
|
||||
Target = GenFdsGlobalVariable.TargetName
|
||||
ToolChain = GenFdsGlobalVariable.ToolChainTag
|
||||
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
|
||||
|
@ -411,7 +411,7 @@ def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
|
|||
ToolOptionKey = Key + '_' + KeyList[3] + '_FLAGS'
|
||||
ToolPath = ToolDefinition.get(ToolPathKey)
|
||||
ToolOption = ToolDefinition.get(ToolOptionKey)
|
||||
if ToolPathTmp == None:
|
||||
if ToolPathTmp is None:
|
||||
ToolPathTmp = ToolPath
|
||||
else:
|
||||
if ToolPathTmp != ToolPath:
|
||||
|
@ -523,38 +523,38 @@ class GenFds :
|
|||
GenFdsGlobalVariable.SetDir ('', FdfParser, WorkSpace, ArchList)
|
||||
|
||||
GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!")
|
||||
if GenFds.OnlyGenerateThisCap != None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
|
||||
if GenFds.OnlyGenerateThisCap is not None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
|
||||
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.get(GenFds.OnlyGenerateThisCap.upper())
|
||||
if CapsuleObj != None:
|
||||
if CapsuleObj is not None:
|
||||
CapsuleObj.GenCapsule()
|
||||
return
|
||||
|
||||
if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(GenFds.OnlyGenerateThisFd.upper())
|
||||
if FdObj != None:
|
||||
if FdObj is not None:
|
||||
FdObj.GenFd()
|
||||
return
|
||||
elif GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisFv == None:
|
||||
elif GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisFv is None:
|
||||
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
|
||||
FdObj.GenFd()
|
||||
|
||||
GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ")
|
||||
if GenFds.OnlyGenerateThisFv != None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
if GenFds.OnlyGenerateThisFv is not None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper())
|
||||
if FvObj != None:
|
||||
if FvObj is not None:
|
||||
Buffer = StringIO.StringIO()
|
||||
FvObj.AddToBuffer(Buffer)
|
||||
Buffer.close()
|
||||
return
|
||||
elif GenFds.OnlyGenerateThisFv == None:
|
||||
elif GenFds.OnlyGenerateThisFv is None:
|
||||
for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
Buffer = StringIO.StringIO('')
|
||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName]
|
||||
FvObj.AddToBuffer(Buffer)
|
||||
Buffer.close()
|
||||
|
||||
if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisCap == None:
|
||||
if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
|
||||
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
|
||||
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
|
||||
for CapsuleName in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
|
||||
|
@ -592,14 +592,14 @@ class GenFds :
|
|||
def GetFvBlockSize(FvObj):
|
||||
DefaultBlockSize = 0x1
|
||||
FdObj = None
|
||||
if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
|
||||
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
|
||||
if FdObj == None:
|
||||
if FdObj is None:
|
||||
for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
|
||||
for ElementRegion in ElementFd.RegionList:
|
||||
if ElementRegion.RegionType == 'FV':
|
||||
for ElementRegionData in ElementRegion.RegionDataList:
|
||||
if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
|
||||
if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
|
||||
if FvObj.BlockSizeList != []:
|
||||
return FvObj.BlockSizeList[0][0]
|
||||
else:
|
||||
|
@ -611,7 +611,7 @@ class GenFds :
|
|||
for ElementRegion in FdObj.RegionList:
|
||||
if ElementRegion.RegionType == 'FV':
|
||||
for ElementRegionData in ElementRegion.RegionDataList:
|
||||
if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
|
||||
if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
|
||||
if FvObj.BlockSizeList != []:
|
||||
return FvObj.BlockSizeList[0][0]
|
||||
else:
|
||||
|
|
|
@ -229,7 +229,7 @@ class GenFdsGlobalVariable:
|
|||
Source = SourceList[Index]
|
||||
Index = Index + 1
|
||||
|
||||
if File.IsBinary and File == Source and Inf.Binaries != None and File in Inf.Binaries:
|
||||
if File.IsBinary and File == Source and Inf.Binaries is not None and File in Inf.Binaries:
|
||||
# Skip all files that are not binary libraries
|
||||
if not Inf.LibraryClass:
|
||||
continue
|
||||
|
@ -420,7 +420,7 @@ class GenFdsGlobalVariable:
|
|||
if not os.path.exists(Output):
|
||||
return True
|
||||
# always update "Output" if no "Input" given
|
||||
if Input == None or len(Input) == 0:
|
||||
if Input is None or len(Input) == 0:
|
||||
return True
|
||||
|
||||
# if fdf file is changed after the 'Output" is generated, update the 'Output'
|
||||
|
@ -445,9 +445,9 @@ class GenFdsGlobalVariable:
|
|||
Cmd += ["-s", Type]
|
||||
if CompressionType not in [None, '']:
|
||||
Cmd += ["-c", CompressionType]
|
||||
if Guid != None:
|
||||
if Guid is not None:
|
||||
Cmd += ["-g", Guid]
|
||||
if DummyFile != None:
|
||||
if DummyFile is not None:
|
||||
Cmd += ["--dummy", DummyFile]
|
||||
if GuidHdrLen not in [None, '']:
|
||||
Cmd += ["-l", GuidHdrLen]
|
||||
|
@ -455,7 +455,7 @@ class GenFdsGlobalVariable:
|
|||
#Add each guided attribute
|
||||
for Attr in GuidAttr:
|
||||
Cmd += ["-r", Attr]
|
||||
if InputAlign != None:
|
||||
if InputAlign is not None:
|
||||
#Section Align is only for dummy section without section type
|
||||
for SecAlign in InputAlign:
|
||||
Cmd += ["--sectionalign", SecAlign]
|
||||
|
@ -509,7 +509,7 @@ class GenFdsGlobalVariable:
|
|||
|
||||
@staticmethod
|
||||
def GetAlignment (AlignString):
|
||||
if AlignString == None:
|
||||
if AlignString is None:
|
||||
return 0
|
||||
if AlignString in ("1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K"):
|
||||
return int (AlignString.rstrip('K')) * 1024
|
||||
|
@ -669,13 +669,13 @@ class GenFdsGlobalVariable:
|
|||
return
|
||||
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
|
||||
|
||||
if ClassCode != None:
|
||||
if ClassCode is not None:
|
||||
Cmd += ["-l", ClassCode]
|
||||
if Revision != None:
|
||||
if Revision is not None:
|
||||
Cmd += ["-r", Revision]
|
||||
if DeviceId != None:
|
||||
if DeviceId is not None:
|
||||
Cmd += ["-i", DeviceId]
|
||||
if VendorId != None:
|
||||
if VendorId is not None:
|
||||
Cmd += ["-f", VendorId]
|
||||
|
||||
Cmd += ["-o", Output]
|
||||
|
@ -726,7 +726,7 @@ class GenFdsGlobalVariable:
|
|||
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
|
||||
(out, error) = PopenObject.communicate()
|
||||
|
||||
while PopenObject.returncode == None :
|
||||
while PopenObject.returncode is None :
|
||||
PopenObject.wait()
|
||||
if returnValue != [] and returnValue[0] != 0:
|
||||
#get command return value
|
||||
|
@ -758,7 +758,7 @@ class GenFdsGlobalVariable:
|
|||
# @param MacroDict Dictionary that contains macro value pair
|
||||
#
|
||||
def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
|
||||
if Str == None :
|
||||
if Str is None :
|
||||
return None
|
||||
|
||||
Dict = {'$(WORKSPACE)' : GenFdsGlobalVariable.WorkSpaceDir,
|
||||
|
@ -774,7 +774,7 @@ class GenFdsGlobalVariable:
|
|||
|
||||
Dict['$(OUTPUT_DIRECTORY)'] = OutputDir
|
||||
|
||||
if MacroDict != None and len (MacroDict) != 0:
|
||||
if MacroDict is not None and len (MacroDict) != 0:
|
||||
Dict.update(MacroDict)
|
||||
|
||||
for key in Dict.keys():
|
||||
|
@ -794,7 +794,7 @@ class GenFdsGlobalVariable:
|
|||
# @param PcdPattern pattern that labels a PCD.
|
||||
#
|
||||
def GetPcdValue (PcdPattern):
|
||||
if PcdPattern == None :
|
||||
if PcdPattern is None :
|
||||
return None
|
||||
PcdPair = PcdPattern.lstrip('PCD(').rstrip(')').strip().split('.')
|
||||
TokenSpace = PcdPair[0]
|
||||
|
|
|
@ -60,7 +60,7 @@ class GuidSection(GuidSectionClassObject) :
|
|||
#
|
||||
self.KeyStringList = KeyStringList
|
||||
self.CurrentArchList = GenFdsGlobalVariable.ArchList
|
||||
if FfsInf != None:
|
||||
if FfsInf is not None:
|
||||
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
|
||||
self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
|
||||
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
|
||||
|
@ -79,7 +79,7 @@ class GuidSection(GuidSectionClassObject) :
|
|||
if self.FvAddr != []:
|
||||
#no use FvAddr when the image is processed.
|
||||
self.FvAddr = []
|
||||
if self.FvParentAddr != None:
|
||||
if self.FvParentAddr is not None:
|
||||
#no use Parent Addr when the image is processed.
|
||||
self.FvParentAddr = None
|
||||
|
||||
|
@ -99,20 +99,20 @@ class GuidSection(GuidSectionClassObject) :
|
|||
if Sect.IncludeFvSection:
|
||||
self.IncludeFvSection = Sect.IncludeFvSection
|
||||
|
||||
if align != None:
|
||||
if MaxAlign == None:
|
||||
if align is not None:
|
||||
if MaxAlign is None:
|
||||
MaxAlign = align
|
||||
if GenFdsGlobalVariable.GetAlignment (align) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
|
||||
MaxAlign = align
|
||||
if ReturnSectList != []:
|
||||
if align == None:
|
||||
if align is None:
|
||||
align = "1"
|
||||
for file in ReturnSectList:
|
||||
SectFile += (file,)
|
||||
SectAlign.append(align)
|
||||
|
||||
if MaxAlign != None:
|
||||
if self.Alignment == None:
|
||||
if MaxAlign is not None:
|
||||
if self.Alignment is None:
|
||||
self.Alignment = MaxAlign
|
||||
else:
|
||||
if GenFdsGlobalVariable.GetAlignment (MaxAlign) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
|
||||
|
@ -128,21 +128,21 @@ class GuidSection(GuidSectionClassObject) :
|
|||
|
||||
ExternalTool = None
|
||||
ExternalOption = None
|
||||
if self.NameGuid != None:
|
||||
if self.NameGuid is not None:
|
||||
ExternalTool, ExternalOption = FindExtendTool(self.KeyStringList, self.CurrentArchList, self.NameGuid)
|
||||
|
||||
#
|
||||
# If not have GUID , call default
|
||||
# GENCRC32 section
|
||||
#
|
||||
if self.NameGuid == None :
|
||||
if self.NameGuid is None :
|
||||
GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
|
||||
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign, IsMakefile=IsMakefile)
|
||||
OutputFileList = []
|
||||
OutputFileList.append(OutputFile)
|
||||
return OutputFileList, self.Alignment
|
||||
#or GUID not in External Tool List
|
||||
elif ExternalTool == None:
|
||||
elif ExternalTool is None:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
|
||||
else:
|
||||
DummyFile = OutputFile + ".dummy"
|
||||
|
@ -170,10 +170,10 @@ class GuidSection(GuidSectionClassObject) :
|
|||
|
||||
FirstCall = False
|
||||
CmdOption = '-e'
|
||||
if ExternalOption != None:
|
||||
if ExternalOption is not None:
|
||||
CmdOption = CmdOption + ' ' + ExternalOption
|
||||
if not GenFdsGlobalVariable.EnableGenfdsMultiThread:
|
||||
if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr != None:
|
||||
if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr is not None:
|
||||
#FirstCall is only set for the encapsulated flash FV image without process required attribute.
|
||||
FirstCall = True
|
||||
#
|
||||
|
@ -213,7 +213,7 @@ class GuidSection(GuidSectionClassObject) :
|
|||
if self.ExtraHeaderSize != -1:
|
||||
HeaderLength = str(self.ExtraHeaderSize)
|
||||
|
||||
if self.ProcessRequired == "NONE" and HeaderLength == None:
|
||||
if self.ProcessRequired == "NONE" and HeaderLength is None:
|
||||
if TempFileSize > InputFileSize:
|
||||
FileHandleIn.seek(0)
|
||||
BufferIn = FileHandleIn.read()
|
||||
|
@ -222,7 +222,7 @@ class GuidSection(GuidSectionClassObject) :
|
|||
if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
|
||||
HeaderLength = str(TempFileSize - InputFileSize)
|
||||
#auto sec guided attribute with process required
|
||||
if HeaderLength == None:
|
||||
if HeaderLength is None:
|
||||
Attribute.append('PROCESSING_REQUIRED')
|
||||
|
||||
FileHandleIn.close()
|
||||
|
@ -253,7 +253,7 @@ class GuidSection(GuidSectionClassObject) :
|
|||
HeaderLength = str(self.ExtraHeaderSize)
|
||||
if self.AuthStatusValid in ("TRUE", "1"):
|
||||
Attribute.append('AUTH_STATUS_VALID')
|
||||
if self.ProcessRequired == "NONE" and HeaderLength == None:
|
||||
if self.ProcessRequired == "NONE" and HeaderLength is None:
|
||||
GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
|
||||
Guid=self.NameGuid, GuidAttr=Attribute,
|
||||
GuidHdrLen=HeaderLength, DummyFile=DummyFile, IsMakefile=IsMakefile)
|
||||
|
|
|
@ -41,7 +41,7 @@ class OptRomFileStatement:
|
|||
#
|
||||
def GenFfs(self, Dict = {}, IsMakefile=False):
|
||||
|
||||
if self.FileName != None:
|
||||
if self.FileName is not None:
|
||||
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
|
||||
|
||||
return self.FileName
|
||||
|
|
|
@ -46,10 +46,10 @@ class OptRomInfStatement (FfsInfStatement):
|
|||
#
|
||||
def __GetOptRomParams(self):
|
||||
|
||||
if self.OverrideAttribs == None:
|
||||
if self.OverrideAttribs is None:
|
||||
self.OverrideAttribs = OptionRom.OverrideAttribs()
|
||||
|
||||
if self.OverrideAttribs.NeedCompress == None:
|
||||
if self.OverrideAttribs.NeedCompress is None:
|
||||
self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
|
||||
if self.OverrideAttribs.NeedCompress is not None:
|
||||
if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
|
||||
|
@ -57,16 +57,16 @@ class OptRomInfStatement (FfsInfStatement):
|
|||
self.OverrideAttribs.NeedCompress = \
|
||||
self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
|
||||
|
||||
if self.OverrideAttribs.PciVendorId == None:
|
||||
if self.OverrideAttribs.PciVendorId is None:
|
||||
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
|
||||
|
||||
if self.OverrideAttribs.PciClassCode == None:
|
||||
if self.OverrideAttribs.PciClassCode is None:
|
||||
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
|
||||
|
||||
if self.OverrideAttribs.PciDeviceId == None:
|
||||
if self.OverrideAttribs.PciDeviceId is None:
|
||||
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
|
||||
|
||||
if self.OverrideAttribs.PciRevision == None:
|
||||
if self.OverrideAttribs.PciRevision is None:
|
||||
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
|
||||
|
||||
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
|
||||
|
@ -121,7 +121,7 @@ class OptRomInfStatement (FfsInfStatement):
|
|||
#
|
||||
|
||||
OutputFileList = []
|
||||
if Rule.FileName != None:
|
||||
if Rule.FileName is not None:
|
||||
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
|
||||
OutputFileList.append(GenSecInputFile)
|
||||
else:
|
||||
|
@ -143,7 +143,7 @@ class OptRomInfStatement (FfsInfStatement):
|
|||
OutputFileList = []
|
||||
for Sect in Rule.SectionList:
|
||||
if Sect.SectionType == 'PE32':
|
||||
if Sect.FileName != None:
|
||||
if Sect.FileName is not None:
|
||||
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
|
||||
OutputFileList.append(GenSecInputFile)
|
||||
else:
|
||||
|
|
|
@ -63,7 +63,7 @@ class OPTIONROM (OptionRomClassObject):
|
|||
FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
|
||||
if len(FilePathNameList) == 0:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s not produce .efi files, so NO file could be put into option ROM." % (FfsFile.InfFileName))
|
||||
if FfsFile.OverrideAttribs == None:
|
||||
if FfsFile.OverrideAttribs is None:
|
||||
EfiFileList.extend(FilePathNameList)
|
||||
else:
|
||||
FileName = os.path.basename(FilePathNameList[0])
|
||||
|
@ -84,7 +84,7 @@ class OPTIONROM (OptionRomClassObject):
|
|||
BinFileList.append(TmpOutputFile)
|
||||
else:
|
||||
FilePathName = FfsFile.GenFfs(IsMakefile=Flag)
|
||||
if FfsFile.OverrideAttribs != None:
|
||||
if FfsFile.OverrideAttribs is not None:
|
||||
FileName = os.path.basename(FilePathName)
|
||||
TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName, FfsFile.CurrentArch)
|
||||
if not os.path.exists(TmpOutputDir) :
|
||||
|
|
|
@ -114,7 +114,7 @@ class Region(RegionClassObject):
|
|||
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
|
||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
|
||||
|
||||
if FvObj != None :
|
||||
if FvObj is not None :
|
||||
if not Flag:
|
||||
GenFdsGlobalVariable.InfLogger(' Region Name = FV')
|
||||
#
|
||||
|
@ -152,7 +152,7 @@ class Region(RegionClassObject):
|
|||
# Add the exist Fv image into FD buffer
|
||||
#
|
||||
if not Flag:
|
||||
if FileName != None:
|
||||
if FileName is not None:
|
||||
FileLength = os.stat(FileName)[ST_SIZE]
|
||||
if FileLength > Size:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR,
|
||||
|
@ -193,7 +193,7 @@ class Region(RegionClassObject):
|
|||
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
|
||||
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
|
||||
|
||||
if CapsuleObj != None :
|
||||
if CapsuleObj is not None :
|
||||
CapsuleObj.CapsuleName = RegionData.upper()
|
||||
GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
|
||||
#
|
||||
|
@ -270,7 +270,7 @@ class Region(RegionClassObject):
|
|||
#
|
||||
self.PadBuffer(Buffer, ErasePolarity, Size)
|
||||
|
||||
if self.RegionType == None:
|
||||
if self.RegionType is None:
|
||||
GenFdsGlobalVariable.InfLogger(' Region Name = None')
|
||||
self.PadBuffer(Buffer, ErasePolarity, Size)
|
||||
|
||||
|
@ -333,7 +333,7 @@ class Region(RegionClassObject):
|
|||
# first check whether FvObj.BlockSizeList items have only "BlockSize" or "NumBlocks",
|
||||
# if so, use ExpectedList
|
||||
for Item in FvObj.BlockSizeList:
|
||||
if Item[0] == None or Item[1] == None:
|
||||
if Item[0] is None or Item[1] is None:
|
||||
FvObj.BlockSizeList = ExpectedList
|
||||
break
|
||||
# make sure region size is no smaller than the summed block size in FV
|
||||
|
|
|
@ -116,17 +116,17 @@ class Section (SectionClassObject):
|
|||
else :
|
||||
IsSect = False
|
||||
|
||||
if FileExtension != None:
|
||||
if FileExtension is not None:
|
||||
Suffix = FileExtension
|
||||
elif IsSect :
|
||||
Suffix = Section.SectionType.get(FileType)
|
||||
else:
|
||||
Suffix = Section.BinFileType.get(FileType)
|
||||
if FfsInf == None:
|
||||
if FfsInf is None:
|
||||
EdkLogger.error("GenFds", GENFDS_ERROR, 'Inf File does not exist!')
|
||||
|
||||
FileList = []
|
||||
if FileType != None:
|
||||
if FileType is not None:
|
||||
for File in FfsInf.BinFileList:
|
||||
if File.Arch == "COMMON" or FfsInf.CurrentArch == File.Arch:
|
||||
if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A \
|
||||
|
@ -141,7 +141,7 @@ class Section (SectionClassObject):
|
|||
else:
|
||||
GenFdsGlobalVariable.InfLogger ("\nCurrent ARCH \'%s\' of File %s is not in the Support Arch Scope of %s specified by INF %s in FDF" %(FfsInf.CurrentArch, File.File, File.Arch, FfsInf.InfFileName))
|
||||
|
||||
if (not IsMakefile and Suffix != None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix != None):
|
||||
if (not IsMakefile and Suffix is not None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix is not None):
|
||||
#
|
||||
# Get Makefile path and time stamp
|
||||
#
|
||||
|
|
|
@ -52,16 +52,16 @@ class UiSection (UiSectionClassObject):
|
|||
#
|
||||
# Prepare the parameter of GenSection
|
||||
#
|
||||
if FfsInf != None:
|
||||
if FfsInf is not None:
|
||||
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
|
||||
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
|
||||
self.FileName = FfsInf.__ExtendMacro__(self.FileName)
|
||||
|
||||
OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('UI'))
|
||||
|
||||
if self.StringData != None :
|
||||
if self.StringData is not None :
|
||||
NameString = self.StringData
|
||||
elif self.FileName != None:
|
||||
elif self.FileName is not None:
|
||||
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
|
||||
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
|
||||
FileObj = open(FileNameStr, 'r')
|
||||
|
|
|
@ -52,7 +52,7 @@ class VerSection (VerSectionClassObject):
|
|||
#
|
||||
# Prepare the parameter of GenSection
|
||||
#
|
||||
if FfsInf != None:
|
||||
if FfsInf is not None:
|
||||
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
|
||||
self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
|
||||
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
|
||||
|
@ -64,9 +64,9 @@ class VerSection (VerSectionClassObject):
|
|||
|
||||
# Get String Data
|
||||
StringData = ''
|
||||
if self.StringData != None:
|
||||
if self.StringData is not None:
|
||||
StringData = self.StringData
|
||||
elif self.FileName != None:
|
||||
elif self.FileName is not None:
|
||||
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
|
||||
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
|
||||
FileObj = open(FileNameStr, 'r')
|
||||
|
|
|
@ -68,7 +68,7 @@ class Vtf (VtfClassObject):
|
|||
FvList = self.GetFvList()
|
||||
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
|
||||
BsfInf = open(self.BsfInfName, 'w+')
|
||||
if self.ResetBin != None:
|
||||
if self.ResetBin is not None:
|
||||
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)
|
||||
BsfInf.writelines ("IA32_RST_BIN" + \
|
||||
" = " + \
|
||||
|
@ -89,7 +89,7 @@ class Vtf (VtfClassObject):
|
|||
'N' + \
|
||||
T_CHAR_LF)
|
||||
|
||||
elif ComponentObj.FilePos != None:
|
||||
elif ComponentObj.FilePos is not None:
|
||||
BsfInf.writelines ("COMP_LOC" + \
|
||||
" = " + \
|
||||
ComponentObj.FilePos + \
|
||||
|
|
|
@ -73,7 +73,7 @@ def _parseForXcode(lines, efifilepath):
|
|||
if status == 1 and len(line) != 0:
|
||||
if '_gPcd_BinaryPatch_' in line:
|
||||
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*_gPcd_BinaryPatch_([\w]+))', line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
pcds.append((m.groups(0)[3], int(m.groups(0)[0], 16)))
|
||||
return pcds
|
||||
|
||||
|
@ -99,20 +99,20 @@ def _parseForGCC(lines, efifilepath):
|
|||
# status handler
|
||||
if status == 3:
|
||||
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
sections.append(m.groups(0))
|
||||
if status == 3:
|
||||
m = re.match('^.data._gPcd_BinaryPatch_([\w_\d]+)$', line)
|
||||
if m != None:
|
||||
if m is not None:
|
||||
if lines[index + 1]:
|
||||
PcdName = m.groups(0)[0]
|
||||
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', lines[index + 1].strip())
|
||||
if m != None:
|
||||
if m is not None:
|
||||
bpcds.append((PcdName, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
|
||||
|
||||
# get section information from efi file
|
||||
efisecs = PeImageClass(efifilepath).SectionHeaderList
|
||||
if efisecs == None or len(efisecs) == 0:
|
||||
if efisecs is None or len(efisecs) == 0:
|
||||
return None
|
||||
#redirection
|
||||
redirection = 0
|
||||
|
@ -152,18 +152,18 @@ def _parseGeneral(lines, efifilepath):
|
|||
continue
|
||||
if status == 1 and len(line) != 0:
|
||||
m = secRe.match(line)
|
||||
assert m != None, "Fail to parse the section in map file , line is %s" % line
|
||||
assert m is not None, "Fail to parse the section in map file , line is %s" % line
|
||||
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
|
||||
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
|
||||
if status == 2 and len(line) != 0:
|
||||
m = symRe.match(line)
|
||||
assert m != None, "Fail to parse the symbol in map file, line is %s" % line
|
||||
assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
|
||||
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
|
||||
sec_no = int(sec_no, 16)
|
||||
sym_offset = int(sym_offset, 16)
|
||||
vir_addr = int(vir_addr, 16)
|
||||
m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name)
|
||||
if m2 != None:
|
||||
if m2 is not None:
|
||||
# fond a binary pcd entry in map file
|
||||
for sec in secs:
|
||||
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
|
||||
|
@ -173,7 +173,7 @@ def _parseGeneral(lines, efifilepath):
|
|||
|
||||
# get section information from efi file
|
||||
efisecs = PeImageClass(efifilepath).SectionHeaderList
|
||||
if efisecs == None or len(efisecs) == 0:
|
||||
if efisecs is None or len(efisecs) == 0:
|
||||
return None
|
||||
|
||||
pcds = []
|
||||
|
@ -214,12 +214,12 @@ if __name__ == '__main__':
|
|||
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if options.mapfile == None or options.efifile == None:
|
||||
if options.mapfile is None or options.efifile is None:
|
||||
print parser.get_usage()
|
||||
elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
|
||||
list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
|
||||
if list != None:
|
||||
if options.outfile != None:
|
||||
if list is not None:
|
||||
if options.outfile is not None:
|
||||
generatePcdTable(list, options.outfile)
|
||||
else:
|
||||
generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
|
||||
|
|
|
@ -267,13 +267,13 @@ def Main():
|
|||
if not os.path.exists (InputFile):
|
||||
EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
|
||||
return 1
|
||||
if CommandOptions.PcdOffset == None or CommandOptions.PcdValue == None or CommandOptions.PcdTypeName == None:
|
||||
if CommandOptions.PcdOffset is None or CommandOptions.PcdValue is None or CommandOptions.PcdTypeName is None:
|
||||
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
|
||||
return 1
|
||||
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
|
||||
EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
|
||||
return 1
|
||||
if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
|
||||
if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize is None:
|
||||
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
|
||||
return 1
|
||||
#
|
||||
|
|
|
@ -85,7 +85,7 @@ class TargetTool():
|
|||
for Key in KeyList:
|
||||
if type(self.TargetTxtDictionary[Key]) == type([]):
|
||||
print "%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key]))
|
||||
elif self.TargetTxtDictionary[Key] == None:
|
||||
elif self.TargetTxtDictionary[Key] is None:
|
||||
errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
|
||||
else:
|
||||
print "%-30s = %s" % (Key, self.TargetTxtDictionary[Key])
|
||||
|
@ -116,14 +116,14 @@ class TargetTool():
|
|||
Line = "%-30s = \n" % Key
|
||||
else:
|
||||
ret = GetConfigureKeyValue(self, Key)
|
||||
if ret != None:
|
||||
if ret is not None:
|
||||
Line = ret
|
||||
fw.write(Line)
|
||||
for key in self.TargetTxtDictionary.keys():
|
||||
if key not in existKeys:
|
||||
print "Warning: %s does not exist in original configuration file" % key
|
||||
Line = GetConfigureKeyValue(self, key)
|
||||
if Line == None:
|
||||
if Line is None:
|
||||
Line = "%-30s = " % key
|
||||
fw.write(Line)
|
||||
|
||||
|
@ -138,14 +138,14 @@ class TargetTool():
|
|||
|
||||
def GetConfigureKeyValue(self, Key):
|
||||
Line = None
|
||||
if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE != None:
|
||||
if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE is not None:
|
||||
dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
|
||||
if os.path.exists(dscFullPath):
|
||||
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
|
||||
else:
|
||||
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
|
||||
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
|
||||
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE != None:
|
||||
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
|
||||
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
|
||||
if os.path.exists(tooldefFullPath):
|
||||
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
|
||||
|
@ -157,15 +157,15 @@ def GetConfigureKeyValue(self, Key):
|
|||
Line = "%-30s = %s\n" % (Key, 'Enable')
|
||||
elif self.Opt.NUM <= 1:
|
||||
Line = "%-30s = %s\n" % (Key, 'Disable')
|
||||
elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM != None:
|
||||
elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
|
||||
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
|
||||
elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET != None:
|
||||
elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
|
||||
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
|
||||
elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH != None:
|
||||
elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH is not None:
|
||||
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
|
||||
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG != None:
|
||||
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG is not None:
|
||||
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
|
||||
elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE != None:
|
||||
elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE is not None:
|
||||
buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
|
||||
if os.path.exists(buildruleFullPath):
|
||||
Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
|
||||
|
@ -223,7 +223,7 @@ def MyOptionParser():
|
|||
if __name__ == '__main__':
|
||||
EdkLogger.Initialize()
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
if os.getenv('WORKSPACE') == None:
|
||||
if os.getenv('WORKSPACE') is None:
|
||||
print "ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool"
|
||||
sys.exit(1)
|
||||
|
||||
|
@ -231,15 +231,15 @@ if __name__ == '__main__':
|
|||
if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
|
||||
print "The number of args isn't 1 or the value of args is invalid."
|
||||
sys.exit(1)
|
||||
if opt.NUM != None and opt.NUM < 1:
|
||||
if opt.NUM is not None and opt.NUM < 1:
|
||||
print "The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0."
|
||||
sys.exit(1)
|
||||
if opt.TARGET != None and len(opt.TARGET) > 1:
|
||||
if opt.TARGET is not None and len(opt.TARGET) > 1:
|
||||
for elem in opt.TARGET:
|
||||
if elem == '0':
|
||||
print "0 will clear the TARGET setting in target.txt and can't combine with other value."
|
||||
sys.exit(1)
|
||||
if opt.TARGET_ARCH != None and len(opt.TARGET_ARCH) > 1:
|
||||
if opt.TARGET_ARCH is not None and len(opt.TARGET_ARCH) > 1:
|
||||
for elem in opt.TARGET_ARCH:
|
||||
if elem == '0':
|
||||
print "0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value."
|
||||
|
|
|
@ -173,7 +173,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
|
|||
elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
|
||||
continue
|
||||
|
||||
if LineIndexOfOriginalFile == None:
|
||||
if LineIndexOfOriginalFile is None:
|
||||
#
|
||||
# Any non-empty lines must be from original preprocessed file.
|
||||
# And this must be the first one.
|
||||
|
@ -193,7 +193,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
|
|||
# convert Decimal number format
|
||||
Line = gDecNumberPattern.sub(r"\1", Line)
|
||||
|
||||
if LineNumber != None:
|
||||
if LineNumber is not None:
|
||||
EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
|
||||
# in case preprocessor removed some lines, like blank or comment lines
|
||||
if LineNumber <= len(NewLines):
|
||||
|
@ -216,10 +216,10 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
|
|||
Brace = 0
|
||||
for Index in range(len(Lines)):
|
||||
Line = Lines[Index]
|
||||
if MulPatternFlag == False and gTypedef_MulPattern.search(Line) == None:
|
||||
if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) == None:
|
||||
if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
|
||||
if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
|
||||
# remove "#pragram pack" directive
|
||||
if gPragmaPattern.search(Line) == None:
|
||||
if gPragmaPattern.search(Line) is None:
|
||||
NewLines.append(Line)
|
||||
continue
|
||||
elif SinglePatternFlag == False:
|
||||
|
@ -282,9 +282,9 @@ def TrimPreprocessedVfr(Source, Target):
|
|||
Lines[Index] = "\n"
|
||||
continue
|
||||
|
||||
if FoundTypedef == False and gTypedefPattern.search(Line) == None:
|
||||
if FoundTypedef == False and gTypedefPattern.search(Line) is None:
|
||||
# keep "#pragram pack" directive
|
||||
if gPragmaPattern.search(Line) == None:
|
||||
if gPragmaPattern.search(Line) is None:
|
||||
Lines[Index] = "\n"
|
||||
continue
|
||||
elif FoundTypedef == False:
|
||||
|
@ -510,7 +510,7 @@ def TrimEdkSources(Source, Target):
|
|||
for FileName in Files:
|
||||
Dummy, Ext = os.path.splitext(FileName)
|
||||
if Ext.upper() not in ['.C', '.H']: continue
|
||||
if Target == None or Target == '':
|
||||
if Target is None or Target == '':
|
||||
TrimEdkSourceCode(
|
||||
os.path.join(CurrentDir, FileName),
|
||||
os.path.join(CurrentDir, FileName)
|
||||
|
@ -568,7 +568,7 @@ def TrimEdkSourceCode(Source, Target):
|
|||
|
||||
NewLines = None
|
||||
for Re,Repl in gImportCodePatterns:
|
||||
if NewLines == None:
|
||||
if NewLines is None:
|
||||
NewLines = Re.sub(Repl, Lines)
|
||||
else:
|
||||
NewLines = Re.sub(Repl, NewLines)
|
||||
|
@ -672,11 +672,11 @@ def Main():
|
|||
|
||||
try:
|
||||
if CommandOptions.FileType == "Vfr":
|
||||
if CommandOptions.OutputFile == None:
|
||||
if CommandOptions.OutputFile is None:
|
||||
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
|
||||
TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile)
|
||||
elif CommandOptions.FileType == "Asl":
|
||||
if CommandOptions.OutputFile == None:
|
||||
if CommandOptions.OutputFile is None:
|
||||
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
|
||||
TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile)
|
||||
elif CommandOptions.FileType == "EdkSourceCode":
|
||||
|
@ -684,13 +684,13 @@ def Main():
|
|||
elif CommandOptions.FileType == "VfrOffsetBin":
|
||||
GenerateVfrBinSec(CommandOptions.ModuleName, CommandOptions.DebugDir, CommandOptions.OutputFile)
|
||||
else :
|
||||
if CommandOptions.OutputFile == None:
|
||||
if CommandOptions.OutputFile is None:
|
||||
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
|
||||
TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong)
|
||||
except FatalError, X:
|
||||
import platform
|
||||
import traceback
|
||||
if CommandOptions != None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
|
||||
if CommandOptions is not None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
|
||||
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
|
||||
return 1
|
||||
except:
|
||||
|
|
|
@ -104,12 +104,12 @@ class DependencyRules(object):
|
|||
# check whether satisfied by current distribution
|
||||
#
|
||||
if not Exist:
|
||||
if DpObj == None:
|
||||
if DpObj is None:
|
||||
Result = False
|
||||
break
|
||||
for GuidVerPair in DpObj.PackageSurfaceArea.keys():
|
||||
if Dep.GetGuid() == GuidVerPair[0]:
|
||||
if Dep.GetVersion() == None or \
|
||||
if Dep.GetVersion() is None or \
|
||||
len(Dep.GetVersion()) == 0:
|
||||
Result = True
|
||||
break
|
||||
|
|
|
@ -247,13 +247,13 @@ class IpiDatabase(object):
|
|||
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
|
||||
RePackage):
|
||||
|
||||
if Version == None or len(Version.strip()) == 0:
|
||||
if Version is None or len(Version.strip()) == 0:
|
||||
Version = 'N/A'
|
||||
|
||||
#
|
||||
# Add newly installed DP information to DB.
|
||||
#
|
||||
if NewDpFileName == None or len(NewDpFileName.strip()) == 0:
|
||||
if NewDpFileName is None or len(NewDpFileName.strip()) == 0:
|
||||
PkgFileName = 'N/A'
|
||||
else:
|
||||
PkgFileName = NewDpFileName
|
||||
|
@ -295,13 +295,13 @@ class IpiDatabase(object):
|
|||
#
|
||||
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
|
||||
|
||||
if Version == None or len(Version.strip()) == 0:
|
||||
if Version is None or len(Version.strip()) == 0:
|
||||
Version = 'N/A'
|
||||
|
||||
if DpGuid == None or len(DpGuid.strip()) == 0:
|
||||
if DpGuid is None or len(DpGuid.strip()) == 0:
|
||||
DpGuid = 'N/A'
|
||||
|
||||
if DpVersion == None or len(DpVersion.strip()) == 0:
|
||||
if DpVersion is None or len(DpVersion.strip()) == 0:
|
||||
DpVersion = 'N/A'
|
||||
|
||||
#
|
||||
|
@ -325,13 +325,13 @@ class IpiDatabase(object):
|
|||
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
|
||||
PkgVersion=None, Path=''):
|
||||
|
||||
if Version == None or len(Version.strip()) == 0:
|
||||
if Version is None or len(Version.strip()) == 0:
|
||||
Version = 'N/A'
|
||||
|
||||
if PkgGuid == None or len(PkgGuid.strip()) == 0:
|
||||
if PkgGuid is None or len(PkgGuid.strip()) == 0:
|
||||
PkgGuid = 'N/A'
|
||||
|
||||
if PkgVersion == None or len(PkgVersion.strip()) == 0:
|
||||
if PkgVersion is None or len(PkgVersion.strip()) == 0:
|
||||
PkgVersion = 'N/A'
|
||||
|
||||
if os.name == 'posix':
|
||||
|
@ -361,13 +361,13 @@ class IpiDatabase(object):
|
|||
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
|
||||
DpVersion=None, Path=''):
|
||||
|
||||
if Version == None or len(Version.strip()) == 0:
|
||||
if Version is None or len(Version.strip()) == 0:
|
||||
Version = 'N/A'
|
||||
|
||||
if DpGuid == None or len(DpGuid.strip()) == 0:
|
||||
if DpGuid is None or len(DpGuid.strip()) == 0:
|
||||
DpGuid = 'N/A'
|
||||
|
||||
if DpVersion == None or len(DpVersion.strip()) == 0:
|
||||
if DpVersion is None or len(DpVersion.strip()) == 0:
|
||||
DpVersion = 'N/A'
|
||||
|
||||
#
|
||||
|
@ -391,10 +391,10 @@ class IpiDatabase(object):
|
|||
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
|
||||
DepexVersion=None):
|
||||
|
||||
if DepexGuid == None or len(DepexGuid.strip()) == 0:
|
||||
if DepexGuid is None or len(DepexGuid.strip()) == 0:
|
||||
DepexGuid = 'N/A'
|
||||
|
||||
if DepexVersion == None or len(DepexVersion.strip()) == 0:
|
||||
if DepexVersion is None or len(DepexVersion.strip()) == 0:
|
||||
DepexVersion = 'N/A'
|
||||
|
||||
if os.name == 'posix':
|
||||
|
@ -510,7 +510,7 @@ class IpiDatabase(object):
|
|||
#
|
||||
def GetDp(self, Guid, Version):
|
||||
|
||||
if Version == None or len(Version.strip()) == 0:
|
||||
if Version is None or len(Version.strip()) == 0:
|
||||
Version = 'N/A'
|
||||
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
|
||||
(DpGuid, DpVersion) = (Guid, Version)
|
||||
|
@ -642,7 +642,7 @@ class IpiDatabase(object):
|
|||
PackageVersion)
|
||||
self.Cur.execute(SqlCommand)
|
||||
|
||||
elif Version == None or len(Version.strip()) == 0:
|
||||
elif Version is None or len(Version.strip()) == 0:
|
||||
|
||||
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
|
||||
(self.PkgTable, Guid)
|
||||
|
|
|
@ -56,7 +56,7 @@ class PackageFile:
|
|||
ExtraData="%s (%s)" % (FileName, str(Xstr)))
|
||||
|
||||
BadFile = self._ZipFile.testzip()
|
||||
if BadFile != None:
|
||||
if BadFile is not None:
|
||||
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
|
||||
ExtraData="[%s] in %s" % (BadFile, FileName))
|
||||
|
||||
|
|
|
@ -618,11 +618,11 @@ def GenSourceStatement(SourceFile, Family, FeatureFlag, TagName=None,
|
|||
# format of SourceFile|Family|TagName|ToolCode|FeatureFlag
|
||||
#
|
||||
Statement += SourceFile
|
||||
if TagName == None:
|
||||
if TagName is None:
|
||||
TagName = ''
|
||||
if ToolCode == None:
|
||||
if ToolCode is None:
|
||||
ToolCode = ''
|
||||
if HelpStr == None:
|
||||
if HelpStr is None:
|
||||
HelpStr = ''
|
||||
if FeatureFlag:
|
||||
Statement += '|' + Family + '|' + TagName + '|' + ToolCode + '|' + FeatureFlag
|
||||
|
|
|
@ -91,7 +91,7 @@ def InstallNewPackage(WorkspaceDir, Path, CustomPath = False):
|
|||
# @param PathList: The already installed standalone module Path list
|
||||
#
|
||||
def InstallNewModule(WorkspaceDir, Path, PathList = None):
|
||||
if PathList == None:
|
||||
if PathList is None:
|
||||
PathList = []
|
||||
Path = ConvertPath(Path)
|
||||
Path = os.path.normpath(Path)
|
||||
|
|
|
@ -555,15 +555,15 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
|
|||
# from HelpText
|
||||
#
|
||||
for Token in List[0:NumTokens]:
|
||||
if Usage == None and Token in UsageTokens:
|
||||
if Usage is None and Token in UsageTokens:
|
||||
Usage = UsageTokens[Token]
|
||||
HelpText = HelpText.replace(Token, '')
|
||||
if Usage != None or not ParseVariable:
|
||||
if Usage is not None or not ParseVariable:
|
||||
for Token in List[0:NumTokens]:
|
||||
if Type == None and Token in TypeTokens:
|
||||
if Type is None and Token in TypeTokens:
|
||||
Type = TypeTokens[Token]
|
||||
HelpText = HelpText.replace(Token, '')
|
||||
if Usage != None:
|
||||
if Usage is not None:
|
||||
for Token in List[0:NumTokens]:
|
||||
if Token in RemoveTokens:
|
||||
HelpText = HelpText.replace(Token, '')
|
||||
|
@ -571,13 +571,13 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
|
|||
#
|
||||
# If no Usage token is present and set Usage to UNDEFINED
|
||||
#
|
||||
if Usage == None:
|
||||
if Usage is None:
|
||||
Usage = 'UNDEFINED'
|
||||
|
||||
#
|
||||
# If no Type token is present and set Type to UNDEFINED
|
||||
#
|
||||
if Type == None:
|
||||
if Type is None:
|
||||
Type = 'UNDEFINED'
|
||||
|
||||
#
|
||||
|
|
|
@ -120,7 +120,7 @@ def GuidStructureStringToGuidString(GuidValue):
|
|||
# @param Directory: The directory name
|
||||
#
|
||||
def CreateDirectory(Directory):
|
||||
if Directory == None or Directory.strip() == "":
|
||||
if Directory is None or Directory.strip() == "":
|
||||
return True
|
||||
try:
|
||||
if not access(Directory, F_OK):
|
||||
|
@ -134,7 +134,7 @@ def CreateDirectory(Directory):
|
|||
# @param Directory: The directory name
|
||||
#
|
||||
def RemoveDirectory(Directory, Recursively=False):
|
||||
if Directory == None or Directory.strip() == "" or not \
|
||||
if Directory is None or Directory.strip() == "" or not \
|
||||
os.path.exists(Directory):
|
||||
return
|
||||
if Recursively:
|
||||
|
@ -237,7 +237,7 @@ def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
|
|||
#
|
||||
def ValidFile(File, Ext=None):
|
||||
File = File.replace('\\', '/')
|
||||
if Ext != None:
|
||||
if Ext is not None:
|
||||
FileExt = os.path.splitext(File)[1]
|
||||
if FileExt.lower() != Ext.lower():
|
||||
return False
|
||||
|
@ -423,7 +423,7 @@ class Sdict(IterableUserDict):
|
|||
## update method
|
||||
#
|
||||
def update(self, Dict=None, **Kwargs):
|
||||
if Dict != None:
|
||||
if Dict is not None:
|
||||
for Key1, Val1 in Dict.items():
|
||||
self[Key1] = Val1
|
||||
if len(Kwargs):
|
||||
|
@ -529,7 +529,7 @@ class PathClass(object):
|
|||
## _GetFileKey
|
||||
#
|
||||
def _GetFileKey(self):
|
||||
if self._Key == None:
|
||||
if self._Key is None:
|
||||
self._Key = self.Path.upper()
|
||||
return self._Key
|
||||
## Validate
|
||||
|
|
|
@ -128,7 +128,7 @@ def IsValidInfComponentType(ComponentType):
|
|||
#
|
||||
def IsValidToolFamily(ToolFamily):
|
||||
ReIsValieFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
|
||||
if ReIsValieFamily.match(ToolFamily) == None:
|
||||
if ReIsValieFamily.match(ToolFamily) is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -159,7 +159,7 @@ def IsValidArch(Arch):
|
|||
if Arch == 'common':
|
||||
return True
|
||||
ReIsValieArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
|
||||
if ReIsValieArch.match(Arch) == None:
|
||||
if ReIsValieArch.match(Arch) is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -179,7 +179,7 @@ def IsValidFamily(Family):
|
|||
return True
|
||||
|
||||
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
|
||||
if ReIsValidFamily.match(Family) == None:
|
||||
if ReIsValidFamily.match(Family) is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -199,13 +199,13 @@ def IsValidBuildOptionName(BuildOptionName):
|
|||
ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
|
||||
ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
|
||||
|
||||
if ReIsValidBuildOption1.match(ToolOptionList[0]) == None:
|
||||
if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
|
||||
return False
|
||||
|
||||
if ReIsValidBuildOption1.match(ToolOptionList[1]) == None:
|
||||
if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
|
||||
return False
|
||||
|
||||
if ReIsValidBuildOption2.match(ToolOptionList[2]) == None:
|
||||
if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
|
||||
return False
|
||||
|
||||
if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
|
||||
|
@ -442,7 +442,7 @@ def IsValidDecVersion(Word):
|
|||
ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
|
||||
else:
|
||||
ReIsValidDecVersion = re.compile(r"[0-9]+$")
|
||||
if ReIsValidDecVersion.match(Word) == None:
|
||||
if ReIsValidDecVersion.match(Word) is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -457,7 +457,7 @@ def IsValidDecVersion(Word):
|
|||
#
|
||||
def IsValidHexVersion(Word):
|
||||
ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
|
||||
if ReIsValidHexVersion.match(Word) == None:
|
||||
if ReIsValidHexVersion.match(Word) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -471,7 +471,7 @@ def IsValidHexVersion(Word):
|
|||
#
|
||||
def IsValidBuildNumber(Word):
|
||||
ReIsValieBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
|
||||
if ReIsValieBuildNumber.match(Word) == None:
|
||||
if ReIsValieBuildNumber.match(Word) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -488,7 +488,7 @@ def IsValidDepex(Word):
|
|||
return IsValidCFormatGuid(Word[Index+4:].strip())
|
||||
|
||||
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
|
||||
if ReIsValidCName.match(Word) == None:
|
||||
if ReIsValidCName.match(Word) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -585,11 +585,11 @@ def IsValidPcdValue(PcdValue):
|
|||
return True
|
||||
|
||||
ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
|
||||
if ReIsValidIntegerSingle.match(PcdValue) != None:
|
||||
if ReIsValidIntegerSingle.match(PcdValue) is not None:
|
||||
return True
|
||||
|
||||
ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
|
||||
if ReIsValidIntegerMulti.match(PcdValue) != None:
|
||||
if ReIsValidIntegerMulti.match(PcdValue) is not None:
|
||||
return True
|
||||
|
||||
#
|
||||
|
@ -654,7 +654,7 @@ def IsValidPcdValue(PcdValue):
|
|||
#
|
||||
def IsValidCVariableName(CName):
|
||||
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
|
||||
if ReIsValidCName.match(CName) == None:
|
||||
if ReIsValidCName.match(CName) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -669,7 +669,7 @@ def IsValidCVariableName(CName):
|
|||
#
|
||||
def IsValidIdentifier(Ident):
|
||||
ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
|
||||
if ReIdent.match(Ident) == None:
|
||||
if ReIdent.match(Ident) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -683,7 +683,7 @@ def IsValidIdentifier(Ident):
|
|||
def IsValidDecVersionVal(Ver):
|
||||
ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
|
||||
|
||||
if ReVersion.match(Ver) == None:
|
||||
if ReVersion.match(Ver) is None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
|
|
@ -134,7 +134,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
|
|||
#
|
||||
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
|
||||
Format = '<TokenSpaceGuidCName>.<PcdCName>'
|
||||
if TokenInfoString != '' and TokenInfoString != None:
|
||||
if TokenInfoString != '' and TokenInfoString is not None:
|
||||
TokenInfoList = GetSplitValueList(TokenInfoString, DataType.TAB_SPLIT)
|
||||
if len(TokenInfoList) == 2:
|
||||
return True
|
||||
|
@ -433,7 +433,7 @@ def GetComponents(Lines, KeyValues, CommentCharacter):
|
|||
LineList = Lines.split('\n')
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line == None or Line == '':
|
||||
if Line is None or Line == '':
|
||||
continue
|
||||
|
||||
if FindBlock == False:
|
||||
|
@ -921,7 +921,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
|
|||
FileLocalMacros[Name] = Value
|
||||
|
||||
ReIsValidMacroName = re.compile(r"^[A-Z][A-Z0-9_]*$", re.DOTALL)
|
||||
if ReIsValidMacroName.match(Name) == None:
|
||||
if ReIsValidMacroName.match(Name) is None:
|
||||
Logger.Error('Parser',
|
||||
FORMAT_INVALID,
|
||||
ST.ERR_MACRONAME_INVALID % (Name),
|
||||
|
@ -940,7 +940,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
|
|||
# <UnicodeString>, <CArray> are subset of <AsciiString>.
|
||||
#
|
||||
ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
|
||||
if ReIsValidMacroValue.match(Value) == None:
|
||||
if ReIsValidMacroValue.match(Value) is None:
|
||||
Logger.Error('Parser',
|
||||
FORMAT_INVALID,
|
||||
ST.ERR_MACROVALUE_INVALID % (Value),
|
||||
|
@ -979,7 +979,7 @@ def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
|
|||
else:
|
||||
Section = '[' + SectionName + ']'
|
||||
Content += '\n' + Section + '\n'
|
||||
if StatementList != None:
|
||||
if StatementList is not None:
|
||||
for Statement in StatementList:
|
||||
LineList = Statement.split('\n')
|
||||
NewStatement = ""
|
||||
|
|
|
@ -166,7 +166,7 @@ def SplitModuleType(Key):
|
|||
#
|
||||
def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None, FileName=None, Flag=False):
|
||||
LastString = String
|
||||
if MacroDefinitions == None:
|
||||
if MacroDefinitions is None:
|
||||
MacroDefinitions = {}
|
||||
while MacroDefinitions:
|
||||
QuotedStringList = []
|
||||
|
@ -244,7 +244,7 @@ def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None
|
|||
#
|
||||
def NormPath(Path, Defines=None):
|
||||
IsRelativePath = False
|
||||
if Defines == None:
|
||||
if Defines is None:
|
||||
Defines = {}
|
||||
if Path:
|
||||
if Path[0] == '.':
|
||||
|
@ -524,7 +524,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
|
|||
# to be checked
|
||||
#
|
||||
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
if CheckFilename != '' and CheckFilename is not None:
|
||||
(Root, Ext) = os.path.splitext(CheckFilename)
|
||||
if Ext.upper() != ExtName.upper() and Root:
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
|
@ -552,7 +552,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
|
|||
#
|
||||
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
|
||||
CheckFile = ''
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
if CheckFilename != '' and CheckFilename is not None:
|
||||
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
|
||||
if not os.path.isfile(CheckFile):
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
|
|
|
@ -161,7 +161,7 @@ def GetLanguageCode1766(LangName, File=None):
|
|||
for Key in gLANG_CONV_TABLE.keys():
|
||||
if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
|
||||
return Key
|
||||
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
|
||||
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
|
||||
for Key in gLANG_CONV_TABLE.keys():
|
||||
if Key == LangName[0:3].lower():
|
||||
return Key
|
||||
|
@ -186,7 +186,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
if IsCompatibleMode:
|
||||
if length == 3 and LangName.isalpha():
|
||||
TempLangName = gLANG_CONV_TABLE.get(LangName.lower())
|
||||
if TempLangName != None:
|
||||
if TempLangName is not None:
|
||||
return TempLangName
|
||||
return LangName
|
||||
else:
|
||||
|
@ -200,7 +200,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
if LangName.isalpha():
|
||||
return LangName
|
||||
elif length == 3:
|
||||
if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None:
|
||||
if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None:
|
||||
return LangName
|
||||
elif length == 5:
|
||||
if LangName[0:2].isalpha() and LangName[2] == '-':
|
||||
|
@ -208,7 +208,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
|
|||
elif length >= 6:
|
||||
if LangName[0:2].isalpha() and LangName[2] == '-':
|
||||
return LangName
|
||||
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
|
||||
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
|
||||
return LangName
|
||||
|
||||
EdkLogger.Error("Unicode File Parser",
|
||||
|
@ -270,14 +270,14 @@ class StringDefClassObject(object):
|
|||
self.UseOtherLangDef = UseOtherLangDef
|
||||
self.Length = 0
|
||||
|
||||
if Name != None:
|
||||
if Name is not None:
|
||||
self.StringName = Name
|
||||
self.StringNameByteList = UniToHexList(Name)
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
self.StringValue = Value
|
||||
self.StringValueByteList = UniToHexList(self.StringValue)
|
||||
self.Length = len(self.StringValueByteList)
|
||||
if Token != None:
|
||||
if Token is not None:
|
||||
self.Token = Token
|
||||
|
||||
def __str__(self):
|
||||
|
@ -288,7 +288,7 @@ class StringDefClassObject(object):
|
|||
repr(self.UseOtherLangDef)
|
||||
|
||||
def UpdateValue(self, Value = None):
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
if self.StringValue:
|
||||
self.StringValue = self.StringValue + '\r\n' + Value
|
||||
else:
|
||||
|
@ -393,7 +393,7 @@ class UniFileClassObject(object):
|
|||
# Check the string name is the upper character
|
||||
if Name != '':
|
||||
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
|
||||
if MatchString == None or MatchString.end(0) != len(Name):
|
||||
if MatchString is None or MatchString.end(0) != len(Name):
|
||||
EdkLogger.Error("Unicode File Parser",
|
||||
ToolError.FORMAT_INVALID,
|
||||
'The string token name %s in UNI file %s must be upper case character.' %(Name, self.File))
|
||||
|
@ -798,7 +798,7 @@ class UniFileClassObject(object):
|
|||
# Load a .uni file
|
||||
#
|
||||
def LoadUniFile(self, File = None):
|
||||
if File == None:
|
||||
if File is None:
|
||||
EdkLogger.Error("Unicode File Parser",
|
||||
ToolError.PARSER_ERROR,
|
||||
Message='No unicode file is given',
|
||||
|
@ -901,7 +901,7 @@ class UniFileClassObject(object):
|
|||
IsAdded = True
|
||||
if Name in self.OrderedStringDict[Language]:
|
||||
IsAdded = False
|
||||
if Value != None:
|
||||
if Value is not None:
|
||||
ItemIndexInList = self.OrderedStringDict[Language][Name]
|
||||
Item = self.OrderedStringList[Language][ItemIndexInList]
|
||||
Item.UpdateValue(Value)
|
||||
|
|
|
@ -36,14 +36,14 @@ import Logger.Log as Logger
|
|||
def CreateXmlElement(Name, String, NodeList, AttributeList):
|
||||
Doc = xml.dom.minidom.Document()
|
||||
Element = Doc.createElement(Name)
|
||||
if String != '' and String != None:
|
||||
if String != '' and String is not None:
|
||||
Element.appendChild(Doc.createTextNode(String))
|
||||
|
||||
for Item in NodeList:
|
||||
if type(Item) == type([]):
|
||||
Key = Item[0]
|
||||
Value = Item[1]
|
||||
if Key != '' and Key != None and Value != '' and Value != None:
|
||||
if Key != '' and Key is not None and Value != '' and Value is not None:
|
||||
Node = Doc.createElement(Key)
|
||||
Node.appendChild(Doc.createTextNode(Value))
|
||||
Element.appendChild(Node)
|
||||
|
@ -52,7 +52,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
|
|||
for Item in AttributeList:
|
||||
Key = Item[0]
|
||||
Value = Item[1]
|
||||
if Key != '' and Key != None and Value != '' and Value != None:
|
||||
if Key != '' and Key is not None and Value != '' and Value is not None:
|
||||
Element.setAttribute(Key, Value)
|
||||
|
||||
return Element
|
||||
|
@ -66,7 +66,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
|
|||
# @param String A XPath style path.
|
||||
#
|
||||
def XmlList(Dom, String):
|
||||
if String == None or String == "" or Dom == None or Dom == "":
|
||||
if String is None or String == "" or Dom is None or Dom == "":
|
||||
return []
|
||||
if Dom.nodeType == Dom.DOCUMENT_NODE:
|
||||
Dom = Dom.documentElement
|
||||
|
@ -101,7 +101,7 @@ def XmlList(Dom, String):
|
|||
# @param String A XPath style path.
|
||||
#
|
||||
def XmlNode(Dom, String):
|
||||
if String == None or String == "" or Dom == None or Dom == "":
|
||||
if String is None or String == "" or Dom is None or Dom == "":
|
||||
return None
|
||||
if Dom.nodeType == Dom.DOCUMENT_NODE:
|
||||
Dom = Dom.documentElement
|
||||
|
|
|
@ -134,7 +134,7 @@ def Debug(Level, Message, ExtraData=None):
|
|||
"msg" : Message,
|
||||
}
|
||||
|
||||
if ExtraData != None:
|
||||
if ExtraData is not None:
|
||||
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict + "\n %s" % ExtraData
|
||||
else:
|
||||
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict
|
||||
|
@ -165,10 +165,10 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
|
|||
#
|
||||
# if no tool name given, use caller's source file name as tool name
|
||||
#
|
||||
if ToolName == None or ToolName == "":
|
||||
if ToolName is None or ToolName == "":
|
||||
ToolName = os.path.basename(extract_stack()[-2][0])
|
||||
|
||||
if Line == None:
|
||||
if Line is None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
@ -180,12 +180,12 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
|
|||
"msg" : Message,
|
||||
}
|
||||
|
||||
if File != None:
|
||||
if File is not None:
|
||||
LogText = _WARNING_MESSAGE_TEMPLATE % TemplateDict
|
||||
else:
|
||||
LogText = _WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
|
||||
|
||||
if ExtraData != None:
|
||||
if ExtraData is not None:
|
||||
LogText += "\n %s" % ExtraData
|
||||
|
||||
_INFO_LOGGER.log(WARN, LogText)
|
||||
|
@ -215,18 +215,18 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
|
|||
ExtraData=None, RaiseError=IS_RAISE_ERROR):
|
||||
if ToolName:
|
||||
pass
|
||||
if Line == None:
|
||||
if Line is None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
||||
if Message == None:
|
||||
if Message is None:
|
||||
if ErrorCode in gERROR_MESSAGE:
|
||||
Message = gERROR_MESSAGE[ErrorCode]
|
||||
else:
|
||||
Message = gERROR_MESSAGE[UNKNOWN_ERROR]
|
||||
|
||||
if ExtraData == None:
|
||||
if ExtraData is None:
|
||||
ExtraData = ""
|
||||
|
||||
TemplateDict = {
|
||||
|
@ -238,7 +238,7 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
|
|||
"extra" : ExtraData
|
||||
}
|
||||
|
||||
if File != None:
|
||||
if File is not None:
|
||||
LogText = _ERROR_MESSAGE_TEMPLATE % TemplateDict
|
||||
else:
|
||||
LogText = __ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
|
||||
|
|
|
@ -73,7 +73,7 @@ def CheckForExistingDp(Path):
|
|||
#
|
||||
#
|
||||
def Main(Options = None):
|
||||
if Options == None:
|
||||
if Options is None:
|
||||
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
|
||||
try:
|
||||
DataBase = GlobalData.gDB
|
||||
|
|
|
@ -271,7 +271,7 @@ class InfBinariesObject(InfSectionCommonDef):
|
|||
#
|
||||
pass
|
||||
|
||||
if InfBianryVerItemObj != None:
|
||||
if InfBianryVerItemObj is not None:
|
||||
if self.Binaries.has_key((InfBianryVerItemObj)):
|
||||
BinariesList = self.Binaries[InfBianryVerItemObj]
|
||||
BinariesList.append((InfBianryVerItemObj, VerComment))
|
||||
|
@ -521,7 +521,7 @@ class InfBinariesObject(InfSectionCommonDef):
|
|||
# #
|
||||
# pass
|
||||
|
||||
if InfBianryCommonItemObj != None:
|
||||
if InfBianryCommonItemObj is not None:
|
||||
if self.Binaries.has_key((InfBianryCommonItemObj)):
|
||||
BinariesList = self.Binaries[InfBianryCommonItemObj]
|
||||
BinariesList.append((InfBianryCommonItemObj, ItemComment))
|
||||
|
@ -538,11 +538,11 @@ class InfBinariesObject(InfSectionCommonDef):
|
|||
#
|
||||
# Validate Arch
|
||||
#
|
||||
if (ArchItem == '' or ArchItem == None):
|
||||
if (ArchItem == '' or ArchItem is None):
|
||||
ArchItem = 'COMMON'
|
||||
__SupArchList.append(ArchItem)
|
||||
|
||||
if UiInf != None:
|
||||
if UiInf is not None:
|
||||
if len(UiInf) > 0:
|
||||
#
|
||||
# Check UI
|
||||
|
@ -672,7 +672,7 @@ class InfBinariesObject(InfSectionCommonDef):
|
|||
# #
|
||||
# pass
|
||||
|
||||
if InfBianryUiItemObj != None:
|
||||
if InfBianryUiItemObj is not None:
|
||||
if self.Binaries.has_key((InfBianryUiItemObj)):
|
||||
BinariesList = self.Binaries[InfBianryUiItemObj]
|
||||
BinariesList.append((InfBianryUiItemObj, UiComment))
|
||||
|
@ -681,7 +681,7 @@ class InfBinariesObject(InfSectionCommonDef):
|
|||
BinariesList = []
|
||||
BinariesList.append((InfBianryUiItemObj, UiComment))
|
||||
self.Binaries[InfBianryUiItemObj] = BinariesList
|
||||
if Ver != None and len(Ver) > 0:
|
||||
if Ver is not None and len(Ver) > 0:
|
||||
self.CheckVer(Ver, __SupArchList)
|
||||
if CommonBinary and len(CommonBinary) > 0:
|
||||
self.ParseCommonBinary(CommonBinary, __SupArchList)
|
||||
|
|
|
@ -62,7 +62,7 @@ class InfDefSectionOptionRomInfo():
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PciVendorId != None:
|
||||
if self.PciVendorId is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -86,7 +86,7 @@ class InfDefSectionOptionRomInfo():
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PciDeviceId != None:
|
||||
if self.PciDeviceId is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -110,7 +110,7 @@ class InfDefSectionOptionRomInfo():
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PciClassCode != None:
|
||||
if self.PciClassCode is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -135,7 +135,7 @@ class InfDefSectionOptionRomInfo():
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PciRevision != None:
|
||||
if self.PciRevision is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -159,7 +159,7 @@ class InfDefSectionOptionRomInfo():
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PciCompress != None:
|
||||
if self.PciCompress is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -215,11 +215,11 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.BaseName != None:
|
||||
if self.BaseName is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
if not (BaseName == '' or BaseName == None):
|
||||
if not (BaseName == '' or BaseName is None):
|
||||
if IsValidWord(BaseName) and not BaseName.startswith("_"):
|
||||
self.BaseName = InfDefMember()
|
||||
self.BaseName.SetValue(BaseName)
|
||||
|
@ -243,7 +243,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.FileGuid != None:
|
||||
if self.FileGuid is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_FILE_GUID),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -274,7 +274,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.ModuleType != None:
|
||||
if self.ModuleType is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_MODULE_TYPE),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -309,7 +309,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
def SetModuleUniFileName(self, ModuleUniFileName, Comments):
|
||||
if Comments:
|
||||
pass
|
||||
if self.ModuleUniFileName != None:
|
||||
if self.ModuleUniFileName is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_MODULE_UNI_FILE),
|
||||
LineInfo=self.CurrentLine)
|
||||
self.ModuleUniFileName = ModuleUniFileName
|
||||
|
@ -327,7 +327,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.InfVersion != None:
|
||||
if self.InfVersion is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_INF_VERSION),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -368,7 +368,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.EdkReleaseVersion != None:
|
||||
if self.EdkReleaseVersion is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -401,7 +401,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.UefiSpecificationVersion != None:
|
||||
if self.UefiSpecificationVersion is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -434,7 +434,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PiSpecificationVersion != None:
|
||||
if self.PiSpecificationVersion is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -495,7 +495,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.VersionString != None:
|
||||
if self.VersionString is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_VERSION_STRING),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -517,7 +517,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.PcdIsDriver != None:
|
||||
if self.PcdIsDriver is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
|
||||
%(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
|
||||
LineInfo=self.CurrentLine)
|
||||
|
@ -710,7 +710,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.Shadow != None:
|
||||
if self.Shadow is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
|
@ -731,7 +731,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
# <CustomMake> ::= [<Family> "|"] <Filename>
|
||||
#
|
||||
def SetCustomMakefile(self, CustomMakefile, Comments):
|
||||
if not (CustomMakefile == '' or CustomMakefile == None):
|
||||
if not (CustomMakefile == '' or CustomMakefile is None):
|
||||
ValueList = GetSplitValueList(CustomMakefile)
|
||||
if len(ValueList) == 1:
|
||||
FileName = ValueList[0]
|
||||
|
@ -811,12 +811,12 @@ class InfDefSection(InfDefSectionOptionRomInfo):
|
|||
#
|
||||
# Value has been set before.
|
||||
#
|
||||
if self.UefiHiiResourceSection != None:
|
||||
if self.UefiHiiResourceSection is not None:
|
||||
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
|
||||
%(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
|
||||
LineInfo=self.CurrentLine)
|
||||
return False
|
||||
if not (UefiHiiResourceSection == '' or UefiHiiResourceSection == None):
|
||||
if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
|
||||
if (IsValidBoolType(UefiHiiResourceSection)):
|
||||
self.UefiHiiResourceSection = InfDefMember()
|
||||
self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
|
||||
|
@ -948,7 +948,7 @@ class InfDefObject(InfSectionCommonDef):
|
|||
RaiseError=True)
|
||||
if Name == DT.TAB_INF_DEFINES_INF_VERSION:
|
||||
HasFoundInfVersionFalg = True
|
||||
if not (Name == '' or Name == None):
|
||||
if not (Name == '' or Name is None):
|
||||
#
|
||||
# Process "SPEC" Keyword definition.
|
||||
#
|
||||
|
@ -971,7 +971,7 @@ class InfDefObject(InfSectionCommonDef):
|
|||
LineInfo=LineInfo)
|
||||
else:
|
||||
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
|
||||
if (ProcessFunc != None):
|
||||
if (ProcessFunc is not None):
|
||||
ProcessFunc(DefineList, Value, InfLineCommentObj)
|
||||
self.Defines[ArchListString] = DefineList
|
||||
else:
|
||||
|
@ -991,7 +991,7 @@ class InfDefObject(InfSectionCommonDef):
|
|||
#
|
||||
else:
|
||||
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
|
||||
if (ProcessFunc != None):
|
||||
if (ProcessFunc is not None):
|
||||
ProcessFunc(DefineList, Value, InfLineCommentObj)
|
||||
self.Defines[ArchListString] = DefineList
|
||||
#
|
||||
|
|
|
@ -107,7 +107,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
|
|||
#
|
||||
# Get/Set Usage and HelpString
|
||||
#
|
||||
if CommentsList != None and len(CommentsList) != 0 :
|
||||
if CommentsList is not None and len(CommentsList) != 0 :
|
||||
CommentInsList = []
|
||||
PreUsage = None
|
||||
PreGuidType = None
|
||||
|
@ -126,7 +126,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
|
|||
[],
|
||||
True)
|
||||
|
||||
if CommentItemHelpText == None:
|
||||
if CommentItemHelpText is None:
|
||||
CommentItemHelpText = ''
|
||||
if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
|
||||
CommentItemHelpText = DT.END_OF_LINE
|
||||
|
@ -236,7 +236,7 @@ class InfGuidObject():
|
|||
#
|
||||
# Validate Arch
|
||||
#
|
||||
if (ArchItem == '' or ArchItem == None):
|
||||
if (ArchItem == '' or ArchItem is None):
|
||||
ArchItem = 'COMMON'
|
||||
|
||||
__SupportArchList.append(ArchItem)
|
||||
|
|
|
@ -43,7 +43,7 @@ class InfHeaderObject():
|
|||
# @param FileName: File Name
|
||||
#
|
||||
def SetFileName(self, FileName):
|
||||
if not (FileName == '' or FileName == None):
|
||||
if not (FileName == '' or FileName is None):
|
||||
self.FileName = FileName
|
||||
return True
|
||||
else:
|
||||
|
@ -59,7 +59,7 @@ class InfHeaderObject():
|
|||
# @param Abstract: Abstract
|
||||
#
|
||||
def SetAbstract(self, Abstract):
|
||||
if not (Abstract == '' or Abstract == None):
|
||||
if not (Abstract == '' or Abstract is None):
|
||||
self.Abstract = Abstract
|
||||
return True
|
||||
else:
|
||||
|
@ -75,7 +75,7 @@ class InfHeaderObject():
|
|||
# @param Description: Description content
|
||||
#
|
||||
def SetDescription(self, Description):
|
||||
if not (Description == '' or Description == None):
|
||||
if not (Description == '' or Description is None):
|
||||
self.Description = Description
|
||||
return True
|
||||
else:
|
||||
|
@ -91,7 +91,7 @@ class InfHeaderObject():
|
|||
# @param Copyright: Copyright content
|
||||
#
|
||||
def SetCopyright(self, Copyright):
|
||||
if not (Copyright == '' or Copyright == None):
|
||||
if not (Copyright == '' or Copyright is None):
|
||||
self.Copyright = Copyright
|
||||
return True
|
||||
else:
|
||||
|
@ -107,7 +107,7 @@ class InfHeaderObject():
|
|||
# @param License: License content
|
||||
#
|
||||
def SetLicense(self, License):
|
||||
if not (License == '' or License == None):
|
||||
if not (License == '' or License is None):
|
||||
self.License = License
|
||||
return True
|
||||
else:
|
||||
|
|
|
@ -38,10 +38,10 @@ def GetArchModuleType(KeyList):
|
|||
#
|
||||
# Validate Arch
|
||||
#
|
||||
if (ArchItem == '' or ArchItem == None):
|
||||
if (ArchItem == '' or ArchItem is None):
|
||||
ArchItem = 'COMMON'
|
||||
|
||||
if (ModuleItem == '' or ModuleItem == None):
|
||||
if (ModuleItem == '' or ModuleItem is None):
|
||||
ModuleItem = 'COMMON'
|
||||
|
||||
if ArchItem not in __SupArchList:
|
||||
|
@ -136,7 +136,7 @@ class InfLibraryClassObject():
|
|||
LibItemObj.CurrentLine.SetLineNo(LibItem[2][1])
|
||||
LibItemObj.CurrentLine.SetLineString(LibItem[2][0])
|
||||
LibItem = LibItem[0]
|
||||
if HelpStringObj != None:
|
||||
if HelpStringObj is not None:
|
||||
LibItemObj.SetHelpString(HelpStringObj)
|
||||
if len(LibItem) >= 1:
|
||||
if LibItem[0].strip() != '':
|
||||
|
|
|
@ -135,9 +135,9 @@ class InfSpecialCommentObject(InfSectionCommonDef):
|
|||
# An encapsulate of Error for INF parser.
|
||||
#
|
||||
def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
|
||||
if ErrorCode == None:
|
||||
if ErrorCode is None:
|
||||
ErrorCode = ToolError.FORMAT_INVALID
|
||||
if LineInfo == None:
|
||||
if LineInfo is None:
|
||||
LineInfo = ['', -1, '']
|
||||
Logger.Error("InfParser",
|
||||
ErrorCode,
|
||||
|
|
|
@ -75,7 +75,7 @@ class InfPackageObject():
|
|||
#
|
||||
# Validate Arch
|
||||
#
|
||||
if (ArchItem == '' or ArchItem == None):
|
||||
if (ArchItem == '' or ArchItem is None):
|
||||
ArchItem = 'COMMON'
|
||||
SupArchList.append(ArchItem)
|
||||
|
||||
|
@ -84,7 +84,7 @@ class InfPackageObject():
|
|||
HelpStringObj = PackageItem[1]
|
||||
CurrentLineOfPackItem = PackageItem[2]
|
||||
PackageItem = PackageItem[0]
|
||||
if HelpStringObj != None:
|
||||
if HelpStringObj is not None:
|
||||
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
|
||||
PackageItemObj.SetHelpString(HelpString)
|
||||
if len(PackageItem) >= 1:
|
||||
|
@ -183,5 +183,5 @@ class InfPackageObject():
|
|||
return True
|
||||
|
||||
def GetPackages(self, Arch = None):
|
||||
if Arch == None:
|
||||
if Arch is None:
|
||||
return self.Packages
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue