Since .utf8 (UTF-8 encoded) Unicode files might contain strings with code points above 0xFFFF, and UEFI strings are limited to 16-bit UCS-2 characters, we need to make sure that BaseTools rejects such strings.
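
As a minimal illustration of why such strings must be rejected (not part of the patch, and assuming a Python 3 interpreter rather than the Python 2 that BaseTools targets), the code point used in the new test needs a UTF-16 surrogate pair, so it cannot be stored in a single 16-bit character:

    # Sketch only: show that U+10300 does not fit in one 16-bit code unit.
    ch = '\U00010300'                 # same code point as in the test below
    units = ch.encode('utf-16-le')    # UTF-16 encoding of the character
    print(len(units) // 2)            # 2 -> a surrogate pair, not one CHAR16
    print(ord(ch) > 0xFFFF)           # True -> BaseTools should reject it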
Cc: Yingke D Liu <yingke.d....@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Jordan Justen <jordan.l.jus...@intel.com>
---
 BaseTools/Tests/CheckUnicodeSourceFiles.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/BaseTools/Tests/CheckUnicodeSourceFiles.py b/BaseTools/Tests/CheckUnicodeSourceFiles.py
index bacbaf6..3757558 100644
--- a/BaseTools/Tests/CheckUnicodeSourceFiles.py
+++ b/BaseTools/Tests/CheckUnicodeSourceFiles.py
@@ -95,6 +95,23 @@ class Tests(TestTools.BaseToolsTest):
     def testUtf8InUtf8File(self):
         self.CheckFile('.utf8', 'utf_8', shouldFail=False)
 
+    def test32bitUnicodeCharInUtf8File(self):
+        data = u'''
+            #langdef en-US "English"
+            #string STR_A #language en-US "CodePoint (\U00010300) > 0xFFFF"
+            '''
+
+        path = self.EncodeToFile('.utf8', 'utf_8', string=data)
+        try:
+            BtUni.UniFileClassObject([path])
+        except EdkLogger.FatalError:
+            return
+        except Exception:
+            pass
+
+        self.fail('A unicode code point larger than 0xffff ' +
+                  'should raise EdkLogger.FatalError')
+
 TheTestSuite = TestTools.MakeTheTestSuite(locals())
 
 if __name__ == '__main__':
-- 
2.1.4