Revision: 19369
http://sourceforge.net/p/edk2/code/19369
Author: vanjeff
Date: 2015-12-18 06:43:57 +0000 (Fri, 18 Dec 2015)
Log Message:
-----------
BaseTools: Clean some coding style issues
This patch cleans up some coding style issues, mainly involving space characters.
(Sync patch r19080 from main trunk.)
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Yonghong Zhu <[email protected]>
Reviewed-by: Liming Gao <[email protected]>
Revision Links:
--------------
http://sourceforge.net/p/edk2/code/19080
Modified Paths:
--------------
branches/UDK2015/BaseTools/Source/Python/AutoGen/AutoGen.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/BuildEngine.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/GenDepex.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/GenMake.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/GenPcdDb.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/StrGather.py
branches/UDK2015/BaseTools/Source/Python/AutoGen/UniClassObject.py
branches/UDK2015/BaseTools/Source/Python/BPDG/GenVpd.py
branches/UDK2015/BaseTools/Source/Python/Common/Dictionary.py
branches/UDK2015/BaseTools/Source/Python/Common/EdkIIWorkspace.py
branches/UDK2015/BaseTools/Source/Python/Common/FdfParserLite.py
branches/UDK2015/BaseTools/Source/Python/Common/MigrationUtilities.py
branches/UDK2015/BaseTools/Source/Python/Common/Misc.py
branches/UDK2015/BaseTools/Source/Python/Common/ToolDefClassObject.py
branches/UDK2015/BaseTools/Source/Python/GenFds/GenFds.py
branches/UDK2015/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
branches/UDK2015/BaseTools/Source/Python/GenFds/GuidSection.py
branches/UDK2015/BaseTools/Source/Python/GenFds/Region.py
branches/UDK2015/BaseTools/Source/Python/GenFds/UiSection.py
branches/UDK2015/BaseTools/Source/Python/GenFds/VerSection.py
branches/UDK2015/BaseTools/Source/Python/GenFds/Vtf.py
branches/UDK2015/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
branches/UDK2015/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
branches/UDK2015/BaseTools/Source/Python/Table/TableReport.py
branches/UDK2015/BaseTools/Source/Python/Workspace/MetaFileParser.py
branches/UDK2015/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
branches/UDK2015/BaseTools/Source/Python/build/BuildReport.py
branches/UDK2015/BaseTools/Source/Python/build/build.py
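
Note: the hunks below are space-character cleanups only -- trailing whitespace is
stripped, and spaces are normalized after commas, around operators, and around the
'%' string-formatting operator, in line with PEP 8. A representative before/after
pair, quoted from the AutoGen.py hunks that follow:

    # before
    EdkLogger.info('%-16s = %s' % ("Toolchain",self.ToolChain))

    # after
    EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
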
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/AutoGen.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/AutoGen.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -232,7 +232,7 @@
# @param SkuId SKU id from command line
#
def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList,
MetaFileDb,
- BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None,
Fvs=None, Caps=None, SkuId='', UniFlag=None,
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None,
Fvs=None, Caps=None, SkuId='', UniFlag=None,
Progress=None, BuildModule=None):
if Fds is None:
Fds = []
@@ -280,7 +280,7 @@
# Validate build target
if self.BuildTarget not in self.Platform.BuildTargets:
- EdkLogger.error("build", PARAMETER_INVALID,
+ EdkLogger.error("build", PARAMETER_INVALID,
ExtraData="Build target [%s] is not supported by
the platform. [Valid target: %s]"
% (self.BuildTarget, "
".join(self.Platform.BuildTargets)))
@@ -288,30 +288,30 @@
# parse FDF file to get PCDs in it, if any
if not self.FdfFile:
self.FdfFile = self.Platform.FlashDefinition
-
+
EdkLogger.info("")
if self.ArchList:
EdkLogger.info('%-16s = %s' % ("Architecture(s)", '
'.join(self.ArchList)))
EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
- EdkLogger.info('%-16s = %s' % ("Toolchain",self.ToolChain))
-
+ EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
+
EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
if BuildModule:
EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
-
+
if self.FdfFile:
EdkLogger.info('%-24s = %s' % ("Flash Image Definition",
self.FdfFile))
EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
-
+
if Progress:
Progress.Start("\nProcessing meta-data")
-
+
if self.FdfFile:
#
# Mark now build in AutoGen Phase
#
- GlobalData.gAutoGenPhase = True
+ GlobalData.gAutoGenPhase = True
Fdf = FdfParser(self.FdfFile.Path)
Fdf.ParseFile()
GlobalData.gFdfParser = Fdf
@@ -336,7 +336,7 @@
if self.CapTargetList:
EdkLogger.info("No flash definition file found. Capsule [%s]
will be ignored." % " ".join(self.CapTargetList))
self.CapTargetList = []
-
+
# apply SKU and inject PCDs from Flash Definition file
for Arch in self.ArchList:
Platform = self.BuildDatabase[self.MetaFile, Arch, Target,
Toolchain]
@@ -391,12 +391,12 @@
Pa.CollectPlatformDynamicPcds()
Pa.CollectFixedAtBuildPcds()
self.AutoGenObjectList.append(Pa)
-
+
#
# Check PCDs token value conflict in each DEC file.
#
self._CheckAllPcdsTokenValueConflict()
-
+
#
# Check PCD type and definition between DSC and DEC
#
@@ -425,7 +425,7 @@
#
# Get INF file GUID
#
- InfFoundFlag = False
+ InfFoundFlag = False
for Pa in self.AutoGenObjectList:
if InfFoundFlag:
break
@@ -436,9 +436,9 @@
_GuidDict[Module.Guid.upper()] = FfsFile
break
else:
- EdkLogger.error("build",
+ EdkLogger.error("build",
FORMAT_INVALID,
- "Duplicate GUID found for
these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+ "Duplicate GUID found for
these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
FfsFile.CurrentLineContent,
_GuidDict[Module.Guid.upper()].CurrentLineNum,
_GuidDict[Module.Guid.upper()].CurrentLineContent,
@@ -452,7 +452,7 @@
InfPath = NormPath(FfsFile.InfFileName)
if not os.path.exists(InfPath):
EdkLogger.error('build', GENFDS_ERROR,
"Non-existant Module %s !" % (FfsFile.InfFileName))
-
+
PathClassObj = PathClass(FfsFile.InfFileName,
self.WorkspaceDir)
#
# Here we just need to get FILE_GUID from INF
file, use 'COMMON' as ARCH attribute. and use
@@ -462,19 +462,19 @@
if not InfObj.Guid.upper() in _GuidDict.keys():
_GuidDict[InfObj.Guid.upper()] = FfsFile
else:
- EdkLogger.error("build",
+ EdkLogger.error("build",
FORMAT_INVALID,
- "Duplicate GUID found for
these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+ "Duplicate GUID found for
these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
FfsFile.CurrentLineContent,
_GuidDict[InfObj.Guid.upper()].CurrentLineNum,
_GuidDict[InfObj.Guid.upper()].CurrentLineContent,
InfObj.Guid.upper()),
ExtraData=self.FdfFile)
InfFoundFlag = False
-
+
if FfsFile.NameGuid != None:
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
-
+
#
# If the NameGuid reference a PCD name.
# The style must match: PCD(xxxx.yyy)
@@ -493,52 +493,52 @@
# First convert from CFormatGuid to
GUID string
#
_PcdGuidString =
GuidStructureStringToGuidString(PcdItem.DefaultValue)
-
+
if not _PcdGuidString:
#
# Then try Byte array.
#
_PcdGuidString =
GuidStructureByteArrayToGuidString(PcdItem.DefaultValue)
-
+
if not _PcdGuidString:
#
# Not Byte array or CFormat GUID,
raise error.
#
EdkLogger.error("build",
FORMAT_INVALID,
- "The format of PCD
value is incorrect. PCD: %s , Value: %s\n"%(_PcdName, PcdItem.DefaultValue),
+ "The format of PCD
value is incorrect. PCD: %s , Value: %s\n" % (_PcdName, PcdItem.DefaultValue),
ExtraData=self.FdfFile)
-
- if not _PcdGuidString.upper() in
_GuidDict.keys():
+
+ if not _PcdGuidString.upper() in
_GuidDict.keys():
_GuidDict[_PcdGuidString.upper()]
= FfsFile
PcdFoundFlag = True
break
else:
- EdkLogger.error("build",
+ EdkLogger.error("build",
FORMAT_INVALID,
- "Duplicate GUID
found for these lines: Line %d: %s and Line %d: %s. GUID:
%s"%(FfsFile.CurrentLineNum,
+ "Duplicate GUID
found for these lines: Line %d: %s and Line %d: %s. GUID: %s" %
(FfsFile.CurrentLineNum,
FfsFile.CurrentLineContent,
_GuidDict[_PcdGuidString.upper()].CurrentLineNum,
_GuidDict[_PcdGuidString.upper()].CurrentLineContent,
FfsFile.NameGuid.upper()),
-
ExtraData=self.FdfFile)
-
+
ExtraData=self.FdfFile)
+
if not FfsFile.NameGuid.upper() in _GuidDict.keys():
_GuidDict[FfsFile.NameGuid.upper()] = FfsFile
else:
#
# Two raw file GUID conflict.
#
- EdkLogger.error("build",
+ EdkLogger.error("build",
FORMAT_INVALID,
- "Duplicate GUID found for these lines:
Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+ "Duplicate GUID found for these lines:
Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
FfsFile.CurrentLineContent,
_GuidDict[FfsFile.NameGuid.upper()].CurrentLineNum,
_GuidDict[FfsFile.NameGuid.upper()].CurrentLineContent,
FfsFile.NameGuid.upper()),
ExtraData=self.FdfFile)
-
+
def _CheckPcdDefineAndType(self):
PcdTypeList = [
"FixedAtBuild", "PatchableInModule", "FeatureFlag",
@@ -552,17 +552,17 @@
# Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
for Pcd in Pa.Platform.Pcds:
PcdType = Pa.Platform.Pcds[Pcd].Type
-
+
# If no PCD type, this PCD comes from FDF
if not PcdType:
continue
-
+
# Try to remove Hii and Vpd suffix
if PcdType.startswith("DynamicEx"):
PcdType = "DynamicEx"
elif PcdType.startswith("Dynamic"):
PcdType = "Dynamic"
-
+
for Package in Pa.PackageList:
# Key of DEC's Pcds dictionary is PcdCName,
TokenSpaceGuid, PcdType
if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
@@ -640,7 +640,7 @@
# BuildCommand should be all the same. So just get one from
platform AutoGen
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
-
+
## Check the PCDs token value conflict in each DEC file.
#
# Will cause build break and raise error message while two PCDs conflict.
@@ -672,12 +672,12 @@
#
# Sort same token value PCD list with TokenGuid and
TokenCName
#
- SameTokenValuePcdList.sort(lambda x, y:
cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName),
"%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
- SameTokenValuePcdListCount = 0
+ SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s" %
(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName,
y.TokenCName)))
+ SameTokenValuePcdListCount = 0
while (SameTokenValuePcdListCount <
len(SameTokenValuePcdList) - 1):
- TemListItem =
SameTokenValuePcdList[SameTokenValuePcdListCount]
- TemListItemNext =
SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
-
+ TemListItem =
SameTokenValuePcdList[SameTokenValuePcdListCount]
+ TemListItemNext =
SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
+
if (TemListItem.TokenSpaceGuidCName ==
TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName !=
TemListItemNext.TokenCName):
EdkLogger.error(
'build',
@@ -689,13 +689,13 @@
SameTokenValuePcdListCount += 1
Count += SameTokenValuePcdListCount
Count += 1
-
+
PcdList = Package.Pcds.values()
- PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName,
x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+ PcdList.sort(lambda x, y: cmp("%s.%s" %
(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName,
y.TokenCName)))
Count = 0
while (Count < len(PcdList) - 1) :
Item = PcdList[Count]
- ItemNext = PcdList[Count + 1]
+ ItemNext = PcdList[Count + 1]
#
# Check PCDs with same TokenSpaceGuidCName.TokenCName have
same token value as well.
#
@@ -786,7 +786,7 @@
"0x01001" : 3, #
******_TOOLCHAIN_****_***********_ATTRIBUTE
"0x10001" : 2, #
TARGET_*********_****_***********_ATTRIBUTE
"0x00001" : 1} #
******_*********_****_***********_ATTRIBUTE (Lowest)
-
+
## The real constructor of PlatformAutoGen
#
# This method is not supposed to be called by users of PlatformAutoGen.
It's
@@ -960,8 +960,8 @@
#GuidValue.update(M.Guids)
self.Platform.Modules[F].M = M
-
- for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:
+
+ for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
# make sure that the "VOID*" kind of datum has MaxDatumSize set
if PcdFromModule.DatumType == "VOID*" and
PcdFromModule.MaxDatumSize in [None, '']:
NoDatumTypePcdList.add("%s.%s [%s]" %
(PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
@@ -1111,9 +1111,9 @@
if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string
type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
Sku.VpdOffset = Sku.VpdOffset.strip()
-
+
PcdValue = Sku.DefaultValue
if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
# if found PCD which datum value is unicode string the
insert to left size of UnicodeIndex
@@ -1124,10 +1124,10 @@
else:
OtherPcdArray.append(Pcd)
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] =
Pcd
-
+ VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
+
PlatformPcds = self.Platform.Pcds.keys()
- PlatformPcds.sort()
+ PlatformPcds.sort()
#
# Add VPD type PCD into VpdFile and determine whether the VPD PCD
need to be fixed up.
#
@@ -1145,8 +1145,8 @@
if self.Platform.VpdToolGuid == None or
self.Platform.VpdToolGuid == '':
EdkLogger.error("Build", FILE_NOT_FOUND, \
"Fail to find third-party BPDG
tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt
and VPD_TOOL_GUID need to be provided in DSC file.")
-
-
+
+
#
# Fix the PCDs define in VPD PCD section that never referenced by
module.
# An example is PCD for signature usage.
@@ -1161,7 +1161,7 @@
if (VpdPcd.TokenSpaceGuidCName ==
DscPcdEntry.TokenSpaceGuidCName) and \
(VpdPcd.TokenCName == DscPcdEntry.TokenCName):
FoundFlag = True
-
+
# Not found, it should be signature
if not FoundFlag :
# just pick the a value to determine whether is
unicode string type
@@ -1211,7 +1211,7 @@
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in
DSC file %s which is required when DSC contains VPD PCD." %
str(self.Platform.MetaFile))
-
+
if VpdFile.GetCount() != 0:
DscTimeStamp = self.Platform.MetaFile.TimeStamp
FvPath = os.path.join(self.BuildDir, "FV")
@@ -1220,14 +1220,14 @@
os.makedirs(FvPath)
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to
create FV folder under %s" % self.BuildDir)
-
-
+
+
VpdFilePath = os.path.join(FvPath, "%s.txt" %
self.Platform.VpdToolGuid)
-
+
if not os.path.exists(VpdFilePath) or
os.path.getmtime(VpdFilePath) < DscTimeStamp:
VpdFile.Write(VpdFilePath)
-
+
# retrieve BPDG tool's path from tool_def.txt according to
VPD_TOOL_GUID defined in DSC file.
BPDGToolName = None
for ToolDef in self.ToolDefinition.values():
@@ -1241,13 +1241,13 @@
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName,
VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find
third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in
tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
-
+
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
VpdMapFilePath = os.path.join(self.BuildDir, "FV",
"%s.map" % self.Platform.VpdToolGuid)
if os.path.exists(VpdMapFilePath):
VpdFile.Read(VpdMapFilePath)
-
+
# Fixup "*" offset
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is
unicode string type
@@ -1258,9 +1258,9 @@
i += 1
else:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not
find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
-
+
# Delete the DynamicPcdList At the last time enter into this
function
- del self._DynamicPcdList[:]
+ del self._DynamicPcdList[:]
self._DynamicPcdList.extend(UnicodePcdArray)
self._DynamicPcdList.extend(HiiPcdArray)
self._DynamicPcdList.extend(OtherPcdArray)
@@ -1471,10 +1471,10 @@
else:
if self._BuildRule._FileVersion < AutoGenReqBuildRuleVerNum :
# If Build Rule's version is less than the version number
required by the tools, halting the build.
- EdkLogger.error("build", AUTOGEN_ERROR,
+ EdkLogger.error("build", AUTOGEN_ERROR,
ExtraData="The version number [%s] of
build_rule.txt is less than the version number required by the AutoGen.(the
minimum required version number is [%s])"\
% (self._BuildRule._FileVersion,
AutoGenReqBuildRuleVerNum))
-
+
return self._BuildRule
## Summarize the packages used by modules in this platform
@@ -1534,28 +1534,28 @@
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d"
% (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
self._PcdTokenNumber[Pcd.TokenCName,
Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
-
+
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "PEI":
if Pcd.Type in ["DynamicEx", "DynamicExDefault",
"DynamicExVpd", "DynamicExHii"]:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d"
% (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
self._PcdTokenNumber[Pcd.TokenCName,
Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
-
+
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "DXE":
if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd",
"DynamicHii"]:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d"
% (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
self._PcdTokenNumber[Pcd.TokenCName,
Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
-
+
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "DXE":
if Pcd.Type in ["DynamicEx", "DynamicExDefault",
"DynamicExVpd", "DynamicExHii"]:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d"
% (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
self._PcdTokenNumber[Pcd.TokenCName,
Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
-
+
for Pcd in self.NonDynamicPcdList:
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
= TokenNumber
TokenNumber += 1
@@ -1787,7 +1787,7 @@
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in
[None, ''])\
and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in
FromPcd.Type):
if ToPcd.Type.strip() == "DynamicEx":
- ToPcd.Type = FromPcd.Type
+ ToPcd.Type = FromPcd.Type
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None,
''] \
and ToPcd.Type != FromPcd.Type:
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD
type",
@@ -1850,11 +1850,11 @@
#
def ApplyPcdSetting(self, Module, Pcds):
# for each PCD in module
- for Name,Guid in Pcds:
- PcdInModule = Pcds[Name,Guid]
+ for Name, Guid in Pcds:
+ PcdInModule = Pcds[Name, Guid]
# find out the PCD setting in platform
- if (Name,Guid) in self.Platform.Pcds:
- PcdInPlatform = self.Platform.Pcds[Name,Guid]
+ if (Name, Guid) in self.Platform.Pcds:
+ PcdInPlatform = self.Platform.Pcds[Name, Guid]
else:
PcdInPlatform = None
# then override the settings if any
@@ -1927,8 +1927,8 @@
# @retval Value Priority value based on the priority list.
#
def CalculatePriorityValue(self, Key):
- Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
- PriorityValue = 0x11111
+ Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
+ PriorityValue = 0x11111
if Target == "*":
PriorityValue &= 0x01111
if ToolChain == "*":
@@ -1939,10 +1939,10 @@
PriorityValue &= 0x11101
if Attr == "*":
PriorityValue &= 0x11110
-
- return self.PrioList["0x%0.5x"%PriorityValue]
-
+ return self.PrioList["0x%0.5x" % PriorityValue]
+
+
## Expand * in build option key
#
# @param Options Options to be expanded
@@ -1953,7 +1953,7 @@
BuildOptions = {}
FamilyMatch = False
FamilyIsNull = True
-
+
OverrideList = {}
#
# Construct a list contain the build options which need override.
@@ -1970,7 +1970,7 @@
if ToolChain == self.ToolChain or ToolChain == "*":
if Arch == self.Arch or Arch == "*":
if Options[Key].startswith("="):
- if OverrideList.get(Key[1]) != None:
+ if OverrideList.get(Key[1]) != None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
@@ -1978,9 +1978,9 @@
# Use the highest priority value.
#
if (len(OverrideList) >= 2):
- KeyList = OverrideList.keys()
+ KeyList = OverrideList.keys()
for Index in range(len(KeyList)):
- NowKey = KeyList[Index]
+ NowKey = KeyList[Index]
Target1, ToolChain1, Arch1, CommandType1, Attr1 =
NowKey.split("_")
for Index1 in range(len(KeyList) - Index - 1):
NextKey = KeyList[Index1 + Index + 1]
@@ -1994,10 +1994,10 @@
if CommandType1 == CommandType2 or
CommandType1 == "*" or CommandType2 == "*":
if Attr1 == Attr2 or Attr1 == "*" or Attr2
== "*":
if self.CalculatePriorityValue(NowKey)
> self.CalculatePriorityValue(NextKey):
- if
Options.get((self.BuildRuleFamily, NextKey)) != None:
+ if
Options.get((self.BuildRuleFamily, NextKey)) != None:
Options.pop((self.BuildRuleFamily, NextKey))
else:
- if
Options.get((self.BuildRuleFamily, NowKey)) != None:
+ if
Options.get((self.BuildRuleFamily, NowKey)) != None:
Options.pop((self.BuildRuleFamily, NowKey))
for Key in Options:
@@ -2045,7 +2045,7 @@
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
- if Tool not in self.ToolDefinition or Family =="":
+ if Tool not in self.ToolDefinition or Family == "":
continue
# option has been added before
if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
@@ -2637,9 +2637,9 @@
# is the former use /I , the Latter used -I to specify include
directories
#
if self.PlatformInfo.ToolChainFamily in ('MSFT'):
- gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^
]*)", re.MULTILINE|re.DOTALL)
+ gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^
]*)", re.MULTILINE | re.DOTALL)
elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
- gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^
]*)", re.MULTILINE|re.DOTALL)
+ gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^
]*)", re.MULTILINE | re.DOTALL)
else:
#
# New ToolChainFamily, don't known whether there is option to
specify include directories
@@ -2673,11 +2673,11 @@
if self.AutoGenVersion >= 0x00010005 and len(IncPathList) > 0:
for Path in IncPathList:
if (Path not in self.IncludePathList) and
(CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
- ErrMsg = "The include directory for the EDK II
module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool,
FlagOption)
- EdkLogger.error("build",
+ ErrMsg = "The include directory for the EDK II
module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool,
FlagOption)
+ EdkLogger.error("build",
PARAMETER_INVALID,
- ExtraData = ErrMsg,
- File = str(self.MetaFile))
+ ExtraData=ErrMsg,
+ File=str(self.MetaFile))
BuildOptionIncPathList += IncPathList
@@ -2797,7 +2797,7 @@
if File.IsBinary and File == Source and self._BinaryFileList !=
None and File in self._BinaryFileList:
# Skip all files that are not binary libraries
if not self.IsLibrary:
- continue
+ continue
RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
elif FileType in self.BuildRules:
RuleObject = self.BuildRules[FileType]
@@ -3215,7 +3215,7 @@
# Also find all packages that the DynamicEx PCDs depend on
Pcds = []
PatchablePcds = {}
- Packages = []
+ Packages = []
PcdCheckList = []
PcdTokenSpaceList = []
for Pcd in self.ModulePcdList + self.LibraryPcdList:
@@ -3292,7 +3292,7 @@
'module_uefi_hii_resource_section' :
[MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if
'MODULE_UNI_FILE' in MDefs else [],
'module_arch' : self.Arch,
- 'package_item' : ['%s' %
(Package.MetaFile.File.replace('\\','/')) for Package in Packages],
+ 'package_item' : ['%s' %
(Package.MetaFile.File.replace('\\', '/')) for Package in Packages],
'binary_item' : [],
'patchablepcd_item' : [],
'pcd_item' : [],
@@ -3316,27 +3316,27 @@
if 'PI_SPECIFICATION_VERSION' in self.Specification:
AsBuiltInfDict['module_pi_specification_version'] +=
[self.Specification['PI_SPECIFICATION_VERSION']]
- OutputDir = self.OutputDir.replace('\\','/').strip('/')
+ OutputDir = self.OutputDir.replace('\\', '/').strip('/')
if self.ModuleType in ['BASE', 'USER_DEFINED']:
for Item in self.CodaTargetList:
- File =
Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
- if Item.Target.Ext.lower() == '.aml':
+ File = Item.Target.Path.replace('\\',
'/').strip('/').replace(OutputDir, '').strip('/')
+ if Item.Target.Ext.lower() == '.aml':
AsBuiltInfDict['binary_item'] += ['ASL|' + File]
- elif Item.Target.Ext.lower() == '.acpi':
+ elif Item.Target.Ext.lower() == '.acpi':
AsBuiltInfDict['binary_item'] += ['ACPI|' + File]
else:
AsBuiltInfDict['binary_item'] += ['BIN|' + File]
else:
for Item in self.CodaTargetList:
- File =
Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
- if Item.Target.Ext.lower() == '.efi':
+ File = Item.Target.Path.replace('\\',
'/').strip('/').replace(OutputDir, '').strip('/')
+ if Item.Target.Ext.lower() == '.efi':
AsBuiltInfDict['binary_item'] += ['PE32|' + self.Name + '.efi']
else:
AsBuiltInfDict['binary_item'] += ['BIN|' + File]
if self.DepexGenerated:
if self.ModuleType in ['PEIM']:
AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name +
'.depex']
- if self.ModuleType in
['DXE_DRIVER','DXE_RUNTIME_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']:
+ if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER',
'DXE_SAL_DRIVER', 'UEFI_DRIVER']:
AsBuiltInfDict['binary_item'] += ['DXE_DEPEX|' + self.Name +
'.depex']
if self.ModuleType in ['DXE_SMM_DRIVER']:
AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name +
'.depex']
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/BuildEngine.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/BuildEngine.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/BuildEngine.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -388,7 +388,7 @@
# find the build_rule_version
if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION)
<> -1:
- if Line.find("=") <> -1 and Line.find("=") < (len(Line)-1) and
(Line[(Line.find("=") + 1):]).split():
+ if Line.find("=") <> -1 and Line.find("=") < (len(Line) - 1)
and (Line[(Line.find("=") + 1):]).split():
self._FileVersion = (Line[(Line.find("=") +
1):]).split()[0]
# skip empty or comment line
if Line == "" or Line[0] == "#":
@@ -470,16 +470,16 @@
if TokenList[0] == "BUILD":
if len(TokenList) == 1:
EdkLogger.error("build", FORMAT_INVALID, "Invalid rule
section",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
FileType = TokenList[1]
if FileType == '':
EdkLogger.error("build", FORMAT_INVALID, "No file type
given",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
if self._FileTypePattern.match(FileType) == None:
- EdkLogger.error("build", FORMAT_INVALID,
File=self.RuleFile, Line=LineIndex+1,
+ EdkLogger.error("build", FORMAT_INVALID,
File=self.RuleFile, Line=LineIndex + 1,
ExtraData="Only character, number
(non-first character), '_' and '-' are allowed in file type")
# new format: File-Type.Build-Type.Arch
else:
@@ -488,7 +488,7 @@
elif FileType != TokenList[0]:
EdkLogger.error("build", FORMAT_INVALID,
"Different file types are not allowed in
the same rule section",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
if len(TokenList) > 1:
BuildType = TokenList[1]
@@ -502,12 +502,12 @@
if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1:
EdkLogger.error("build", FORMAT_INVALID,
"Specific build types must not be mixed with
common one",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
if 'COMMON' in self._ArchList and len(self._ArchList) > 1:
EdkLogger.error("build", FORMAT_INVALID,
"Specific ARCH must not be mixed with common one",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
self._FileType = FileType
@@ -531,7 +531,7 @@
elif SectionType != Type:
EdkLogger.error("build", FORMAT_INVALID,
"Two different section types are not allowed
in the same sub-section",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
if len(TokenList) > 1:
@@ -548,10 +548,10 @@
if 'COMMON' in FamilyList and len(FamilyList) > 1:
EdkLogger.error("build", FORMAT_INVALID,
"Specific tool chain family should not be mixed
with general one",
- File=self.RuleFile, Line=LineIndex+1,
+ File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
if self._State not in self._StateHandler:
- EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile,
Line=LineIndex+1,
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile,
Line=LineIndex + 1,
ExtraData="Unknown subsection: %s" %
self.RuleContent[LineIndex])
## Parse <InputFile> sub-section
#
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/GenDepex.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/GenDepex.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/GenDepex.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -286,7 +286,7 @@
# don't generate depex if only TRUE operand left
if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and
NewOperand[0] == 'TRUE':
self.PostfixNotation = []
- return
+ return
# don't generate depex if all operands are architecture protocols
if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER',
'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \
@@ -424,7 +424,7 @@
Dpx = DependencyExpression(DxsString, Option.ModuleType,
Option.Optimize)
if Option.OutputFile != None:
FileChangeFlag = Dpx.Generate(Option.OutputFile)
- if not FileChangeFlag and DxsFile:
+ if not FileChangeFlag and DxsFile:
#
# Touch the output file if its time stamp is older than the
original
# DXS file to avoid re-invoke this tool for the dependency
check in build rule.
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/GenMake.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/GenMake.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/GenMake.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -27,7 +27,7 @@
import Common.GlobalData as GlobalData
## Regular expression for finding header file inclusions
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[
\t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[
\t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)
+gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[
\t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[
\t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
## Regular expression for matching macro used in header file inclusion
gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
@@ -499,7 +499,7 @@
# convert source files and binary files to build targets
self.ResultFileList = [str(T.Target) for T in
self._AutoGenObject.CodaTargetList]
- if len(self.ResultFileList) == 0 and
len(self._AutoGenObject.SourceFileList) <> 0:
+ if len(self.ResultFileList) == 0 and
len(self._AutoGenObject.SourceFileList) <> 0:
EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
ExtraData="[%s]" % str(self._AutoGenObject))
@@ -520,9 +520,9 @@
FileMacro = ""
IncludePathList = []
for P in self._AutoGenObject.IncludePathList:
- IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros))
+ IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
-
self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P)
+
self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
{
"macro_name" : "INC",
@@ -533,7 +533,7 @@
# Generate macros used to represent files containing list of input
files
for ListFileMacro in self.ListFileMacros:
- ListFileName = os.path.join(self._AutoGenObject.OutputDir,
"%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5])
+ ListFileName = os.path.join(self._AutoGenObject.OutputDir,
"%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
SaveFileOnChange(
ListFileName,
@@ -767,7 +767,7 @@
try:
Fd = open(F.Path, 'r')
except BaseException, X:
- EdkLogger.error("build", FILE_OPEN_FAILURE,
ExtraData=F.Path+"\n\t"+str(X))
+ EdkLogger.error("build", FILE_OPEN_FAILURE,
ExtraData=F.Path + "\n\t" + str(X))
FileContent = Fd.read()
Fd.close()
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/GenPcdDb.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -784,11 +784,11 @@
DbTotal = [InitValueUint64, VardefValueUint64, InitValueUint32,
VardefValueUint32, VpdHeadValue, ExMapTable,
LocalTokenNumberTable, GuidTable, StringHeadValue,
PcdNameOffsetTable,VariableTable,SkuTable, StringTableLen,
PcdTokenTable,PcdCNameTable,
- SizeTableValue, InitValueUint16,
VardefValueUint16,InitValueUint8, VardefValueUint8, InitValueBoolean,
+ SizeTableValue, InitValueUint16, VardefValueUint16,
InitValueUint8, VardefValueUint8, InitValueBoolean,
VardefValueBoolean, SkuidValue, SkuIndexValue,
UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8,
UnInitValueBoolean]
DbItemTotal = [DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32,
DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue,
DbPcdNameOffsetTable,DbVariableTable,DbSkuTable, DbStringTableLen,
DbPcdTokenTable, DbPcdCNameTable,
- DbSizeTableValue, DbInitValueUint16,
DbVardefValueUint16,DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
+ DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16,
DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
DbVardefValueBoolean, DbSkuidValue, DbSkuIndexValue,
DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16,
DbUnInitValueUint8, DbUnInitValueBoolean]
# SkuidValue is the last table in the init table items
@@ -1343,7 +1343,7 @@
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"':
DefaultValueBinStructure =
StringToArray(Sku.DefaultValue)
- Size = len(Sku.DefaultValue) -2 + 1
+ Size = len(Sku.DefaultValue) - 2 + 1
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '{':
DefaultValueBinStructure =
StringToArray(Sku.DefaultValue)
@@ -1375,7 +1375,7 @@
Pcd.InitString = 'INIT'
else:
if int(Sku.DefaultValue, 0) != 0:
- Pcd.InitString = 'INIT'
+ Pcd.InitString = 'INIT'
#
# For UNIT64 type PCD's value, ULL should be append to avoid
# warning under linux building environment.
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/StrGather.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/StrGather.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/StrGather.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -113,7 +113,7 @@
# @retval: A list for formatted hex string
#
def DecToHexList(Dec, Digit = 8):
- Hex = eval("'%0" + str(Digit) + "X' % int(Dec)" )
+ Hex = eval("'%0" + str(Digit) + "X' % int(Dec)")
List = []
for Bit in range(Digit - 2, -1, -2):
List.append(HexHeader + Hex[Bit:Bit + 2])
@@ -192,7 +192,7 @@
Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' *
(ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) +
COMMENT_NOT_REFERENCED
UnusedStr = WriteLine(UnusedStr, Line)
- Str = ''.join([Str,UnusedStr])
+ Str = ''.join([Str, UnusedStr])
Str = WriteLine(Str, '')
if IsCompatibleMode or UniGenCFlag:
@@ -235,7 +235,7 @@
#
def CreateBinBuffer(BinBuffer, Array):
for Item in Array:
- BinBuffer.write(pack("B", int(Item,16)))
+ BinBuffer.write(pack("B", int(Item, 16)))
## Create a formatted string all items in an array
#
@@ -258,7 +258,7 @@
Index = Index + 1
else:
ArrayItem = WriteLine(ArrayItem, Line)
- Line = ' ' + Item + ', '
+ Line = ' ' + Item + ', '
Index = 1
ArrayItem = Write(ArrayItem, Line.rstrip())
@@ -320,7 +320,7 @@
if PrimaryTag == UniLanguagePrimaryTag:
if UniLanguage not in UniLanguageListFiltered:
- UniLanguageListFiltered += [UniLanguage]
+ UniLanguageListFiltered += [UniLanguage]
break
else:
# Here is rule 3 for "get best language"
@@ -368,7 +368,7 @@
UniLanguageList = []
for IndexI in range(len(UniObjectClass.LanguageDef)):
- UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
+ UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList,
LanguageFilterList)
@@ -450,14 +450,14 @@
if UniBinBuffer:
CreateBinBuffer (UniBinBuffer, List)
UniBinBuffer.write (StringBuffer.getvalue())
- UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END,16)))
+ UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
StringBuffer.close()
#
# Create line for string variable name
# "unsigned char $(BaseName)Strings[] = {"
#
- AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName +
COMMON_FILE_NAME + '[] = {\n' )
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName +
COMMON_FILE_NAME + '[] = {\n')
if IsCompatibleMode:
#
@@ -618,13 +618,13 @@
# Write an item
#
def Write(Target, Item):
- return ''.join([Target,Item])
+ return ''.join([Target, Item])
#
# Write an item with a break line
#
def WriteLine(Target, Item):
- return ''.join([Target,Item,'\n'])
+ return ''.join([Target, Item, '\n'])
# This acts like the main() function for the script, unless it is 'import'ed
into another
# script.
Modified: branches/UDK2015/BaseTools/Source/Python/AutoGen/UniClassObject.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/AutoGen/UniClassObject.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/AutoGen/UniClassObject.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -248,7 +248,7 @@
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
LineNo = GetLineNo(FileIn, Line, False)
EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong
language definition",
- ExtraData="""%s\n\t*Correct format is like
'#langdef en-US "English"'""" % Line, File = File, Line = LineNo)
+ ExtraData="""%s\n\t*Correct format is like
'#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
else:
LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode,
self.File)
LangPrintName = Lang[2]
@@ -352,7 +352,7 @@
if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString == None or MatchString.end(0) != len(Name):
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The
string token name %s defined in UNI file %s contains the invalid lower case
character.' %(Name, self.File))
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The
string token name %s defined in UNI file %s contains the invalid lower case
character.' % (Name, self.File))
LanguageList = Item.split(u'#language ')
for IndexI in range(len(LanguageList)):
if IndexI == 0:
@@ -512,7 +512,7 @@
if not self.IsCompatibleMode and Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString == None or MatchString.end(0) != len(Name):
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID,
'The string token name %s defined in UNI file %s contains the invalid lower
case character.' %(Name, self.File))
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID,
'The string token name %s defined in UNI file %s contains the invalid lower
case character.' % (Name, self.File))
self.AddStringToList(Name, Language, Value)
continue
@@ -571,7 +571,7 @@
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
- Item.UseOtherLangDef = ''
+ Item.UseOtherLangDef = ''
if IsAdded:
Token = len(self.OrderedStringList[Language])
Modified: branches/UDK2015/BaseTools/Source/Python/BPDG/GenVpd.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/BPDG/GenVpd.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/BPDG/GenVpd.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -48,19 +48,19 @@
self.PcdBinSize = PcdBinSize
if self.PcdValue == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid PCD format(Name: %s File: %s line: %s) ,
no Value specified!" %(self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s line: %s) ,
no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
if self.PcdOffset == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid PCD format(Name: %s File: %s Line: %s) ,
no Offset specified!" %(self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s Line: %s) ,
no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
if self.PcdSize == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid PCD format(Name: %s File: %s Line: %s),
no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s Line: %s),
no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
self._GenOffsetValue ()
-
+
## Analyze the string value to judge the PCD's datum type euqal to Boolean
or not.
#
# @param ValueString PCD's value
@@ -74,10 +74,10 @@
if ValueString.upper() in ["TRUE", "FALSE"]:
return True
elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
- return True
-
+ return True
+
return False
-
+
## Convert the PCD's value from string to integer.
#
# This function will try to convert the Offset value form string to
integer
@@ -91,9 +91,9 @@
try:
self.PcdBinOffset = int(self.PcdOffset, 16)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid offset value %s for PCD %s (File:
%s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid offset value %s for PCD %s (File:
%s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
+
## Pack Boolean type VPD PCD's value form string to binary type.
#
# @param ValueString The boolean type string for pack.
@@ -101,18 +101,18 @@
#
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1",
"0x01"]:
- try:
- self.PcdValue = pack(_FORMAT_CHAR[1], 1)
+ try:
+ self.PcdValue = pack(_FORMAT_CHAR[1], 1)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to
pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to
pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
else:
try:
- self.PcdValue = pack(_FORMAT_CHAR[1], 0)
+ self.PcdValue = pack(_FORMAT_CHAR[1], 0)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to
pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to
pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
## Pack Integer type VPD PCD's value form string to binary type.
#
# @param ValueString The Integer type string for pack.
@@ -120,46 +120,46 @@
#
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR.keys():
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size %d for PCD %s in integer datum
size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
-
- if Size == 1:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size %d for PCD %s in integer datum
size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
+
+ if Size == 1:
if IntValue < 0:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD
%s in UINT8 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName,
self.FileName, self.Lineno))
elif IntValue >= 0x100:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Too large PCD value %d for datum type UINT8
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT8
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
elif Size == 2:
if IntValue < 0:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD
%s in UINT16 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName,
self.FileName, self.Lineno))
elif IntValue >= 0x10000:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Too large PCD value %d for datum type UINT16
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT16
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
elif Size == 4:
if IntValue < 0:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD
%s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName,
self.FileName, self.Lineno))
elif IntValue >= 0x100000000:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT32
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
elif Size == 8:
if IntValue < 0:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD
%s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName,
self.FileName, self.Lineno))
elif IntValue >= 0x10000000000000000:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT32
for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName,
self.Lineno))
else:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size %d for PCD %s in integer datum
size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size %d for PCD %s in integer datum
size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
+
try:
- self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
+ self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to pack(File: %s
Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s
Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack VOID* type VPD PCD's value form string to binary type.
#
@@ -178,53 +178,53 @@
elif ValueString.startswith('"') and ValueString.endswith('"'):
self._PackString(ValueString, Size)
else:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid VOID* type PCD %s value %s (File: %s
Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid VOID* type PCD %s value %s (File: %s
Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
+
## Pack an Ascii PCD value.
#
# An Ascii string for a PCD should be in format as "".
#
def _PackString(self, ValueString, Size):
if (Size < 0):
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid parameter Size %s of PCD %s!(File: %s
Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue,
self.PcdCName, self.FileName, self.Lineno))
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue,
self.PcdCName, self.FileName, self.Lineno))
if (len(ValueString) < 2):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD:
%s ,ASCII string %s at least contains two!(File: %s Line: %s)" %
(self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
-
+
ValueString = ValueString[1:-1]
if len(ValueString) + 1 > Size:
- EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s
Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try:
- self.PcdValue= pack('%ds' % Size, ValueString)
+ self.PcdValue = pack('%ds' % Size, ValueString)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to pack(File: %s
Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s
Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
## Pack a byte-array PCD value.
#
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
#
def _PackByteArray(self, ValueString, Size):
- if (Size < 0):
+ if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize,
self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue,
self.PcdCName, self.FileName, self.Lineno))
-
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue,
self.PcdCName, self.FileName, self.Lineno))
+
ValueString = ValueString.strip()
ValueString = ValueString.lstrip('{').strip('}')
ValueList = ValueString.split(',')
ValueList = [item.strip() for item in ValueList]
-
+
if len(ValueList) > Size:
- EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The byte array %s is too large for size %d(File:
%s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
-
+
ReturnArray = array.array('B')
-
+
for Index in xrange(len(ValueList)):
Value = None
if ValueList[Index].lower().startswith('0x'):
@@ -232,7 +232,7 @@
try:
Value = int(ValueList[Index], 16)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an
invalid HEX value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString,
self.FileName, self.Lineno))
else:
@@ -243,55 +243,55 @@
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an
invalid DECIMAL value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString,
self.FileName, self.Lineno))
-
+
if Value > 255:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "The value item %s in byte array %s do not in
range 0 ~ 0xFF(File: %s Line: %s)" %\
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "The value item %s in byte array %s do not in
range 0 ~ 0xFF(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName,
self.Lineno))
-
+
ReturnArray.append(Value)
-
+
for Index in xrange(len(ValueList), Size):
ReturnArray.append(0)
-
- self.PcdValue = ReturnArray.tolist()
+ self.PcdValue = ReturnArray.tolist()
+
## Pack a unicode PCD value into byte array.
#
# A unicode string for a PCD should be in format as L"".
#
def _PackUnicode(self, UnicodeString, Size):
- if (Size < 0):
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter Size %s of PCD %s!(File: %s Line: %s)" %\
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid
parameter Size %s of PCD %s!(File: %s Line: %s)" % \
(self.PcdBinSize, self.PcdCName, self.FileName,
self.Lineno))
if (len(UnicodeString) < 3):
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD:
%s ,ASCII string %s at least contains two!(File: %s Line: %s)" %\
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD:
%s ,ASCII string %s at least contains two!(File: %s Line: %s)" % \
(self.PcdCName, self.PcdUnpackValue,
self.FileName, self.Lineno))
-
+
UnicodeString = UnicodeString[2:-1]
-
+
if (len(UnicodeString) + 1) * 2 > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The size of unicode string %s is too larger for
size %s(File: %s Line: %s)" % \
(UnicodeString, Size, self.FileName, self.Lineno))
-
+
ReturnArray = array.array('B')
for Value in UnicodeString:
try:
ReturnArray.append(ord(Value))
ReturnArray.append(0)
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid unicode character %s in unicode
string %s(File: %s Line: %s)" % \
(Value, UnicodeString, self.FileName,
self.Lineno))
-
+
for Index in xrange(len(UnicodeString) * 2, Size):
ReturnArray.append(0)
-
- self.PcdValue = ReturnArray.tolist()
+ self.PcdValue = ReturnArray.tolist()
+
## The class implementing the BPDG VPD PCD offset fix process
#
# The VPD PCD offset fix process includes:
@@ -300,7 +300,7 @@
# 3. Fixed offset if needed;
# 4. Generate output file, including guided.map and guided.bin file;
#
-class GenVPD :
+class GenVPD :
## Constructor of DscBuildData
#
# Initialize object of GenVPD
@@ -322,47 +322,47 @@
try:
self.FileLinesList = fInputfile.readlines()
except:
- EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE,
"File read failed for %s" %InputFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE,
"File read failed for %s" % InputFileName, None)
finally:
fInputfile.close()
except:
- EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File
open failed for %s" %InputFileName,None)
-
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File
open failed for %s" % InputFileName, None)
+
##
# Parser the input file which is generated by the build tool. Convert the
value of each pcd's
# from string to it's real format. Also remove the useless line in the
input file.
#
def ParserInputFile (self):
- count = 0
+ count = 0
for line in self.FileLinesList:
# Strip "\r\n" generated by readlines ().
line = line.strip()
line = line.rstrip(os.linesep)
-
+
# Skip the comment line
if (not line.startswith("#")) and len(line) > 1 :
#
# Enhanced for support "|" character in the string.
#
ValueList = ['', '', '', '','']
-
- ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
+
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(line)
-
+
ValueUpdateFlag = False
-
+
if len(PtrValue) >= 1:
line = re.sub(ValueRe, '', line)
- ValueUpdateFlag = True
-
+ ValueUpdateFlag = True
+
TokenList = line.split('|')
ValueList[0:len(TokenList)] = TokenList
-
+
if ValueUpdateFlag:
ValueList[4] = PtrValue[0]
self.FileLinesList[count] = ValueList
# Store the line number
- self.FileLinesList[count].append(str(count+1))
+ self.FileLinesList[count].append(str(count + 1))
elif len(line) <= 1 :
# Set the blank line to "None"
self.FileLinesList[count] = None
@@ -370,9 +370,9 @@
# Set the comment line to "None"
self.FileLinesList[count] = None
count += 1
-
+
# The line count contain usage information
- count = 0
+ count = 0
# Delete useless lines
while (True) :
try :
@@ -381,18 +381,18 @@
else :
count += 1
except :
- break
+ break
#
# After remove the useless line, if there are no data remain in the
file line list,
# Report warning messages to user's.
#
if len(self.FileLinesList) == 0 :
- EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
+ EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file,
Please check it.")
-
+
# Process the pcds one by one base on the pcd's value and size
count = 0
- for line in self.FileLinesList:
+ for line in self.FileLinesList:
if line != None :
PCD = PcdEntry(line[0], line[1], line[2], line[3],
line[4],line[5], self.InputFileName)
# Strip the space char
@@ -421,7 +421,7 @@
PCD.PcdBinSize = PackSize
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize,
self.InputFileName, PCD.Lineno))
-
+
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
PCD._PackBooleanValue(PCD.PcdValue)
self.FileLinesList[count] = PCD
@@ -431,7 +431,7 @@
# Try to translate value to an integer firstly.
#
IsInteger = True
- PackValue = None
+ PackValue = None
try:
PackValue = int(PCD.PcdValue)
except:
@@ -439,23 +439,23 @@
PackValue = int(PCD.PcdValue, 16)
except:
IsInteger = False
-
+
if IsInteger:
PCD._PackIntValue(PackValue, PackSize)
else:
PCD._PackPtrValue(PCD.PcdValue, PackSize)
-
+
self.FileLinesList[count] = PCD
count += 1
else :
continue
-
+
##
    # This function used to create a clean list only contain useful information and reorganized to make it
# easy to be sorted
#
def FormatFileLine (self) :
-
+
for eachPcd in self.FileLinesList :
if eachPcd.PcdOffset != '*' :
# Use pcd's Offset value as key, and pcd's Value as value
@@ -463,43 +463,43 @@
else :
# Use pcd's CName as key, and pcd's Size as value
self.PcdUnknownOffsetList.append(eachPcd)
-
-
+
+
##
    # This function is use to fix the offset value which the not specified in the map file.
    # Usually it use the star (meaning any offset) character in the offset field
#
- def FixVpdOffset (self):
+ def FixVpdOffset (self):
# At first, the offset should start at 0
        # Sort fixed offset list in order to find out where has free spaces for the pcd's offset
# value is "*" to insert into.
-
-        self.PcdFixedOffsetSizeList.sort(lambda x,y: cmp(x.PcdBinOffset, y.PcdBinOffset))
-
+
+        self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))
+
#
# Sort the un-fixed pcd's offset by it's size.
#
-        self.PcdUnknownOffsetList.sort(lambda x,y: cmp(x.PcdBinSize, y.PcdBinSize))
-
+        self.PcdUnknownOffsetList.sort(lambda x, y: cmp(x.PcdBinSize, y.PcdBinSize))
+
#
# Process all Offset value are "*"
#
        if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :
# The offset start from 0
NowOffset = 0
- for Pcd in self.PcdUnknownOffsetList :
+ for Pcd in self.PcdUnknownOffsetList :
Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdBinSize
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
return
-
+
# Check the offset of VPD type pcd's offset start from 0.
- if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
+ if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start
with 0, please check it.",
- None)
-
+ None)
+
        # Judge whether the offset in fixed pcd offset list is overlapped or not.
lenOfList = len(self.PcdFixedOffsetSizeList)
count = 0
@@ -508,22 +508,22 @@
PcdNext = self.PcdFixedOffsetSizeList[count+1]
# Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
- EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-                                "The offset of %s at line: %s is same with %s at line: %s in file %s" %\
+                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+                                "The offset of %s at line: %s is same with %s at line: %s in file %s" % \
                                (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
-
+
# Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :
- EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-                                "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" %\
+                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+                                "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
                                (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
-
+
# Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :
- EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-                               "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" %\
+                EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+                               "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
                                (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
count += 1
@@ -545,7 +545,7 @@
if LastOffset < NowOffset :
if lenOfUnfixedList != 0 :
countOfUnfixedList = 0
- while(countOfUnfixedList < lenOfUnfixedList) :
+ while(countOfUnfixedList < lenOfUnfixedList) :
                    eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
needFixPcdSize = eachUnfixedPcd.PcdBinSize
# Not been fixed
@@ -586,8 +586,8 @@
FixOffsetSizeListCount += 1
            # Usually it will not enter into this thunk, if so, means it overlapped.
else :
-                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
-                                "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" %\
+                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
+                                "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \
                                (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None)
FixOffsetSizeListCount += 1
@@ -618,46 +618,46 @@
#Open an VPD file to process
try:
- fVpdFile = open (BinFileName, "wb", 0)
+ fVpdFile = open(BinFileName, "wb", 0)
except:
# Open failed
-            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None)
-
+            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
+
try :
- fMapFile = open (MapFileName, "w", 0)
+ fMapFile = open(MapFileName, "w", 0)
except:
# Open failed
-            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None)
-
+            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
+
# Use a instance of StringIO to cache data
- fStringIO = StringIO.StringIO('')
-
+ fStringIO = StringIO.StringIO('')
+
# Write the header of map file.
try :
fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
except:
-            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
-
+            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
+
for eachPcd in self.PcdFixedOffsetSizeList :
# write map file
try :
fMapFile.write("%s | %s | %s | %s | %s \n" %
(eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset,
eachPcd.PcdSize,eachPcd.PcdUnpackValue))
except:
-                EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
-
+                EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
+
# Write Vpd binary file
- fStringIO.seek (eachPcd.PcdBinOffset)
+ fStringIO.seek (eachPcd.PcdBinOffset)
if isinstance(eachPcd.PcdValue, list):
ValueList = [chr(Item) for Item in eachPcd.PcdValue]
- fStringIO.write(''.join(ValueList))
- else:
+ fStringIO.write(''.join(ValueList))
+ else:
fStringIO.write (eachPcd.PcdValue)
-
- try :
+
+ try :
fVpdFile.write (fStringIO.getvalue())
except:
-            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None)
-
+            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)
+
fStringIO.close ()
fVpdFile.close ()
fMapFile.close ()
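
For reference, the GenVpd.py hunks above are whitespace-only; the FixVpdOffset logic they touch still sorts the fixed-offset PCDs by offset, sorts the "*"-offset PCDs by size, and slots the latter into the free space between (or after) the fixed regions. The sketch below is only a rough illustration of that placement idea under a first-fit assumption; the function name and behavior are invented for the example, not taken from the BPDG code.

    # Illustrative first-fit placement of "*" entries between fixed (offset, size)
    # regions, in the spirit of FixVpdOffset. Not the BPDG implementation.
    def place_unknown(fixed, unknown_sizes):
        fixed = sorted(fixed)                    # (offset, size) pairs, by offset
        unknown = sorted(unknown_sizes)          # smallest first
        placed = []
        cursor = 0
        for offset, size in fixed:
            for need in list(unknown):
                if cursor + need <= offset:      # fits in the gap before this region
                    placed.append((cursor, need))
                    cursor += need
                    unknown.remove(need)
            cursor = max(cursor, offset + size)  # skip past the fixed region
        for need in unknown:                     # anything left goes at the end
            placed.append((cursor, need))
            cursor += need
        return placed

    print(place_unknown([(0, 4), (16, 8)], [2, 4, 8]))   # [(4, 2), (6, 4), (24, 8)]
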
Modified: branches/UDK2015/BaseTools/Source/Python/Common/Dictionary.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/Common/Dictionary.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/Common/Dictionary.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -27,19 +27,19 @@
#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try:
- F = open(FileName,'r')
+ F = open(FileName, 'r')
Keys = []
for Line in F:
if Line.startswith(CommentCharacter):
continue
- LineList = Line.split(KeySplitCharacter,1)
+ LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2:
Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
if ValueSplitFlag:
-                    Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+                    Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
else:
- Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+ Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
Keys += [Key[0]]
F.close()
return 0
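
The Dictionary.py hunk above only adjusts spacing around commas; the parsing itself is unchanged: skip comment lines, split each remaining line once on the key separator, normalize backslashes to forward slashes, and optionally split the value into a list. A minimal standalone sketch of that flow follows; it is not an import of the BaseTools module, and the argument names, defaults, and sample input are assumptions made for the example.

    # Standalone sketch of the "key = value" parsing done by
    # ConvertTextFileToDictionary; names and defaults are illustrative.
    def text_to_dict(lines, comment='#', key_split='=', split_values=False, value_split=' '):
        result = {}
        for line in lines:
            if line.startswith(comment):
                continue
            parts = line.split(key_split, 1)
            if len(parts) < 2:
                continue
            key = parts[0].split()
            if len(key) == 1 and key[0] not in result:
                value = parts[1].strip().replace('\\', '/')
                result[key[0]] = value.split(value_split) if split_values else value
        return result

    lines = ['# target settings', 'EFI_SOURCE = C:\\EdkII', 'ACTIVE_PLATFORM = Nt32Pkg/Nt32Pkg.dsc']
    print(text_to_dict(lines))   # EFI_SOURCE -> C:/EdkII, ACTIVE_PLATFORM -> Nt32Pkg/Nt32Pkg.dsc
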
Modified: branches/UDK2015/BaseTools/Source/Python/Common/EdkIIWorkspace.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/Common/EdkIIWorkspace.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/Common/EdkIIWorkspace.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -59,7 +59,7 @@
#
# Load TianoCoreOrgLogo, used for GUI tool
#
-            self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)
+            self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'), wx.BITMAP_TYPE_GIF)
except:
self.Icon = None
@@ -151,7 +151,7 @@
def XmlParseFileSection (self, FileName, SectionTag):
if self.Verbose:
print FileName
- return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
+ return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
## Save a XML file
#
@@ -219,19 +219,19 @@
#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try:
- F = open(FileName,'r')
+ F = open(FileName, 'r')
except:
return False
Keys = []
for Line in F:
- LineList = Line.split(KeySplitCharacter,1)
+ LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2:
Key = LineList[0].split()
-        if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
+        if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
if ValueSplitFlag:
-                Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+                Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
else:
- Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+ Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
Keys += [Key[0]]
F.close()
return True
@@ -252,7 +252,7 @@
#
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try:
- F = open(FileName,'r')
+ F = open(FileName, 'r')
Lines = []
Lines = F.readlines()
F.close()
@@ -265,7 +265,7 @@
MaxLength = len(Key)
Index = 0
for Line in Lines:
- LineList = Line.split(KeySplitCharacter,1)
+ LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2:
Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
@@ -275,17 +275,17 @@
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
Lines.pop(Index)
if Key[0] in Keys:
- Lines.insert(Index,Line)
+ Lines.insert(Index, Line)
Keys.remove(Key[0])
Index += 1
for RemainingKey in Keys:
if ValueSplitFlag:
-            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey]))
+            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))
else:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
Lines.append(Line)
try:
- F = open(FileName,'w')
+ F = open(FileName, 'w')
except:
return False
F.writelines(Lines)
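
One detail worth calling out in the ConvertDictionaryToTextFile lines above is the '%-*s %c %s\n' template: the '*' width is filled from MaxLength, so every key is left-justified to the longest key and the separators line up when the file is rewritten. A quick worked example of that formatting; the key/value pairs below are made up for illustration.

    # '%-*s' left-justifies the key to a caller-supplied width, which is how the
    # rewritten file keeps its columns aligned. Example values are arbitrary.
    MaxLength = len('ACTIVE_PLATFORM')
    for key, value in [('TARGET', 'DEBUG'), ('ACTIVE_PLATFORM', 'Nt32Pkg/Nt32Pkg.dsc')]:
        print('%-*s %c %s' % (MaxLength, key, '=', value))
    # Both keys are padded to 15 characters, so the '=' signs line up.
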
Modified: branches/UDK2015/BaseTools/Source/Python/Common/FdfParserLite.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/Common/FdfParserLite.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/Common/FdfParserLite.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -69,8 +69,8 @@
# @param File The FDF name
# @param Line The Line number that error occurs
#
- def __init__(self, Str, File = None, Line = None):
-
+ def __init__(self, Str, File=None, Line=None):
+
FileLineTuple = GetRealFileLine(File, Line)
self.FileName = FileLineTuple[0]
self.LineNumber = FileLineTuple[1]
@@ -359,8 +359,8 @@
else:
raise Warning("Macro not complete At Line ", self.FileName,
self.CurrentLineNumber)
return Str
-
- def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):
+
+ def __ReplaceFragment(self, StartPos, EndPos, Value=' '):
if StartPos[0] == EndPos[0]:
Offset = StartPos[1]
while Offset <= EndPos[1]:
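
The FdfParserLite.py hunks apply the same kind of cleanup to default arguments: PEP 8 asks for no spaces around '=' when it marks a keyword or default value, which is why "File = None" becomes "File=None" and "Value = ' '" becomes "Value=' '". A tiny before/after illustration; the function names here are invented, and both forms behave identically.

    # Spacing around '=' in default arguments; only the second form follows PEP 8.
    def replace_fragment_old(start, end, value = ' '):   # extra spaces around '='
        return start, end, value

    def replace_fragment_new(start, end, value=' '):     # cleaned-up style
        return start, end, value

    assert replace_fragment_old(0, 1) == replace_fragment_new(0, 1)
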
Modified: branches/UDK2015/BaseTools/Source/Python/Common/MigrationUtilities.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/Common/MigrationUtilities.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/Common/MigrationUtilities.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -423,7 +423,7 @@
Description = CommonHeader.Description
License = CommonHeader.License
- Header = "#/** @file\n#\n"
+ Header = "#/** @file\n#\n"
Header += "# " + Abstract + "\n#\n"
Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
Header += "# " + CopyRight + "\n#\n"
@@ -519,7 +519,7 @@
# @retval Options A optparse object containing the parsed options.
# @retval InputFile Path of an source file to be migrated.
#
-def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
+def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
# use clearer usage to override default usage message
UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
Version = "%s Version %.2f" % (ToolName, VersionNumber)
Modified: branches/UDK2015/BaseTools/Source/Python/Common/Misc.py
===================================================================
--- branches/UDK2015/BaseTools/Source/Python/Common/Misc.py 2015-12-18 06:41:38 UTC (rev 19368)
+++ branches/UDK2015/BaseTools/Source/Python/Common/Misc.py 2015-12-18 06:43:57 UTC (rev 19369)
@@ -38,7 +38,7 @@
from Common.MultipleWorkspace import MultipleWorkspace as mws
## Regular expression used to find out place holders in string template
-gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)
+gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
## Dictionary used to store file time stamp for quick re-access
gFileTimeStampCache = {} # {file path : file time stamp}
@@ -293,11 +293,11 @@
def GuidStringToGuidStructureString(Guid):
GuidList = Guid.split('-')
Result = '{'
- for Index in range(0,3,1):
+ for Index in range(0, 3, 1):
Result = Result + '0x' + GuidList[Index] + ', '
Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
- for Index in range(0,12,2):
- Result = Result + ', 0x' + GuidList[4][Index:Index+2]
+ for Index in range(0, 12, 2):
+ Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
Result += '}}'
return Result
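
Since the GuidStringToGuidStructureString hunk shows the whole conversion, a worked example may help: the function turns a registry-format GUID string into a C structure initializer. The snippet below re-expresses the same steps so the example can run standalone; the GUID value is arbitrary.

    # Same conversion as GuidStringToGuidStructureString, restated so the
    # example is self-contained; the input GUID is just an example value.
    def guid_to_struct(guid):
        parts = guid.split('-')
        result = '{'
        for i in range(0, 3):
            result += '0x' + parts[i] + ', '
        result += '{0x' + parts[3][0:2] + ', 0x' + parts[3][2:4]
        for i in range(0, 12, 2):
            result += ', 0x' + parts[4][i:i + 2]
        return result + '}}'

    print(guid_to_struct('A31280AD-481E-41B6-95E8-127F4C984779'))
    # {0xA31280AD, 0x481E, 0x41B6, {0x95, 0xE8, 0x12, 0x7F, 0x4C, 0x98, 0x47, 0x79}}
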
@@ -494,7 +494,7 @@
Fd.write(Content)
Fd.close()
except IOError, X:
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X)
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
return True
@@ -613,7 +613,7 @@
#
# @retval A list of all files
#
-def GetFiles(Root, SkipList=None, FullPath = True):
+def GetFiles(Root, SkipList=None, FullPath=True):
OriPath = Root
FileList = []
for Root, Dirs, Files in os.walk(Root):
@@ -663,7 +663,7 @@
if OverrideDir[-1] == os.path.sep:
return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
else:
-                return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]
+                return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]
if GlobalData.gAllFiles:
        NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
if not NewFile:
@@ -675,7 +675,7 @@
if Dir[-1] == os.path.sep:
return NewFile[len(Dir):], NewFile[0:len(Dir)]
else:
- return NewFile[len(Dir)+1:], NewFile[0:len(Dir)]
+ return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
else:
return NewFile, ''
@@ -701,7 +701,7 @@
# Replace the default dir to current dir
if Dir == '.':
Dir = os.getcwd()
- Dir = Dir[len(Workspace)+1:]
+ Dir = Dir[len(Workspace) + 1:]
# First check if File has Edk definition itself
if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:
@@ -740,7 +740,7 @@
# Dir is current module dir related to workspace
if Dir == '.':
Dir = os.getcwd()
- Dir = Dir[len(Workspace)+1:]
+ Dir = Dir[len(Workspace) + 1:]
NewFile = File
RelaPath = AllFiles[os.path.normpath(Dir)]
@@ -865,7 +865,7 @@
#
# PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
#
- for PlaceHolder,Start,End in PlaceHolderList:
+ for PlaceHolder, Start, End in PlaceHolderList:
self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
self._SubSectionList.append(TemplateSection[Start:End])
self._PlaceHolderList.append(PlaceHolder)
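
The TemplateString lines above iterate over (PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint) triples, presumably produced by gPlaceholderPattern, the regex whose flags are reformatted earlier in this file. The pattern captures the name inside ${...}; a quick standalone illustration, using a made-up template string:

    # The placeholder regex from Misc.py, applied to a made-up template string.
    import re

    placeholder = re.compile(r"\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
    template = "${module_name} : ${arch}\n"
    print(placeholder.findall(template))   # ['module_name', 'arch']
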
@@ -1251,11 +1251,11 @@
if len(key) > 1:
RestKeys = key[1:]
elif self._Level_ > 1:
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
else:
FirstKey = key
if self._Level_ > 1:
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
        if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
FirstKey = self._Wildcard
@@ -1328,11 +1328,11 @@
if len(key) > 1:
RestKeys = key[1:]
else:
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
else:
FirstKey = key
if self._Level_ > 1:
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
if FirstKey in self._ValidWildcardList:
FirstKey = self._Wildcard
@@ -1437,7 +1437,7 @@
Pair += 1
elif ch == ')' and not InStr:
Pair -= 1
-
+
if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT:
NewStr += '-'
else:
@@ -1491,7 +1491,7 @@
IsValid = (len(FieldList) <= 3)
else:
IsValid = (len(FieldList) <= 1)
- return [Value, Type, Size], IsValid, 0
+ return [Value, Type, Size], IsValid, 0
elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):
VpdOffset = FieldList[0]
Value = Size = ''
@@ -1532,17 +1532,17 @@
#
# @retval ValueList: A List contain value, datum type and toke number.
#
-def AnalyzePcdData(Setting):
- ValueList = ['', '', '']
-
- ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
+def AnalyzePcdData(Setting):
+ ValueList = ['', '', '']
+
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Setting)
ValueUpdateFlag = False
if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting)
- ValueUpdateFlag = True
+ ValueUpdateFlag = True
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
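
AnalyzePcdData (and AnalyzeVpdPcdData below it) deal with one awkward case: a quoted PCD value may itself contain the '|' field separator. The approach visible in the hunk is to pull such a quoted value out with ValueRe first, split the remainder on '|', and put the protected value back afterwards. A simplified standalone sketch of that trick follows; which field the value is restored into is an assumption here, since that part of the function lies outside the quoted lines.

    # Sketch of the "protect a quoted value containing '|', then split" approach.
    import re

    def split_pcd_setting(setting):
        quoted = re.compile(r'^\s*L?\".*\|.*\"')
        fields = ['', '', '']
        protected = quoted.findall(setting)
        if protected:
            setting = re.sub(quoted, '', setting)
        tokens = setting.split('|')
        fields[0:len(tokens)] = tokens
        if protected:
            fields[0] = protected[0]      # restore the quoted value (assumed position)
        return [field.strip() for field in fields]

    print(split_pcd_setting('L"A|B"|VOID*|16'))   # ['L"A|B"', 'VOID*', '16']
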
@@ -1578,17 +1578,17 @@
#
#   @retval   ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue.
#
-def AnalyzeVpdPcdData(Setting):
- ValueList = ['', '', '']
-
- ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
+def AnalyzeVpdPcdData(Setting):
+ ValueList = ['', '', '']
+
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(Setting)
ValueUpdateFlag = False
if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting)
@@ Diff output truncated at 100000 characters. @@