commit 9558d863c830060b370d57b8716411089fd06f16
Author: Georg Baum <[email protected]>
Date:   Sat Jun 11 11:06:11 2016 +0200

    Some python 3 fixes for lyx2lyx
    
    These were found by 2to3 and later adapted to use the compatibility code
    which was already used in some parts of lyx2lyx, e.g. lyx_1_5.py.

diff --git a/lib/lyx2lyx/lyx_1_6.py b/lib/lyx2lyx/lyx_1_6.py
index 2f8ebd2..f431d5f 100644
--- a/lib/lyx2lyx/lyx_1_6.py
+++ b/lib/lyx2lyx/lyx_1_6.py
@@ -24,6 +24,16 @@ import sys, os
 
 from parser_tools import find_token, find_end_of, find_tokens, get_value
 
+# Provide support for both python 2 and 3
+PY2 = sys.version_info[0] == 2
+if not PY2:
+    text_type = str
+    unichr = chr
+else:
+    text_type = unicode
+    unichr = unichr
+# End of code to support for both python 2 and 3
+
 ####################################################################
 # Private helper functions
 
diff --git a/lib/lyx2lyx/lyx_2_2.py b/lib/lyx2lyx/lyx_2_2.py
index 942aa6a..073f815 100644
--- a/lib/lyx2lyx/lyx_2_2.py
+++ b/lib/lyx2lyx/lyx_2_2.py
@@ -39,6 +39,16 @@ from parser_tools import find_token, find_token_backwards, find_re, \
     find_end_of_inset, find_end_of_layout, find_nonempty_line, \
     get_containing_layout, get_value, check_token
 
+# Provide support for both python 2 and 3
+PY2 = sys.version_info[0] == 2
+if not PY2:
+    text_type = str
+    unichr = chr
+else:
+    text_type = unicode
+    unichr = unichr
+# End of code to support for both python 2 and 3
+
 ####################################################################
 # Private helper functions
 
@@ -796,7 +806,7 @@ def convert_specialchar_internal(document, forward):
             else:
                 i = j
             continue
-        for key, value in specialchars.iteritems():
+        for key, value in specialchars.items():
             if forward:
                 document.body[i] = document.body[i].replace("\\SpecialChar " + key, "\\SpecialChar " + value)
                 document.body[i] = document.body[i].replace("\\SpecialCharNoPassThru " + key, "\\SpecialCharNoPassThru " + value)
@@ -1155,7 +1165,7 @@ def convert_origin(document):
         else:
             origin = os.path.join("/systemlyxdir", relpath).replace('\\', '/') + '/'
         if os.name != 'nt':
-            origin = unicode(origin, sys.getfilesystemencoding())
+            origin = text_type(origin, sys.getfilesystemencoding())
     document.header[i:i] = ["\\origin " + origin]
 
 
diff --git a/lib/lyx2lyx/test_parser_tools.py b/lib/lyx2lyx/test_parser_tools.py
index 7e34947..4af3225 100644
--- a/lib/lyx2lyx/test_parser_tools.py
+++ b/lib/lyx2lyx/test_parser_tools.py
@@ -65,28 +65,28 @@ class TestParserTools(unittest.TestCase):
     def test_check_token(self):
         line = "\\begin_layout Standard"
 
-        self.assertEquals(check_token(line, '\\begin_layout'), True)
-        self.assertEquals(check_token(line, 'Standard'), False)
+        self.assertEqual(check_token(line, '\\begin_layout'), True)
+        self.assertEqual(check_token(line, 'Standard'), False)
 
 
     def test_is_nonempty_line(self):
-        self.assertEquals(is_nonempty_line(lines[0]), False)
-        self.assertEquals(is_nonempty_line(lines[1]), True)
-        self.assertEquals(is_nonempty_line(" "*5), False)
+        self.assertEqual(is_nonempty_line(lines[0]), False)
+        self.assertEqual(is_nonempty_line(lines[1]), True)
+        self.assertEqual(is_nonempty_line(" "*5), False)
 
 
     def test_find_token(self):
-        self.assertEquals(find_token(lines, '\\emph', 0), 7)
-        self.assertEquals(find_token(lines, '\\emph', 0, 5), -1)
-        self.assertEquals(find_token(lines, '\\emp', 0, 0, True), -1)
-        self.assertEquals(find_token(lines, '\\emp', 0, 0, False), 7)
-        self.assertEquals(find_token(lines, 'emph', 0), -1)
+        self.assertEqual(find_token(lines, '\\emph', 0), 7)
+        self.assertEqual(find_token(lines, '\\emph', 0, 5), -1)
+        self.assertEqual(find_token(lines, '\\emp', 0, 0, True), -1)
+        self.assertEqual(find_token(lines, '\\emp', 0, 0, False), 7)
+        self.assertEqual(find_token(lines, 'emph', 0), -1)
 
 
     def test_find_tokens(self):
         tokens = ['\\emph', '\\end_inset']
-        self.assertEquals(find_tokens(lines, tokens, 0), 4)
-        self.assertEquals(find_tokens(lines, tokens, 0, 4), -1)
+        self.assertEqual(find_tokens(lines, tokens, 0), 4)
+        self.assertEqual(find_tokens(lines, tokens, 0, 4), -1)
 
 
 if __name__ == '__main__':  

Reply via email to