Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-pytokens for openSUSE:Factory checked in at 2025-11-12 21:41:44
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pytokens (Old)
 and      /work/SRC/openSUSE:Factory/.python-pytokens.new.1980 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pytokens"

Wed Nov 12 21:41:44 2025 rev:4 rq:1317160 version:0.3.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pytokens/python-pytokens.changes  2025-10-30 17:09:22.269506560 +0100
+++ /work/SRC/openSUSE:Factory/.python-pytokens.new.1980/python-pytokens.changes    2025-11-12 21:42:33.981113097 +0100
@@ -1,0 +2,7 @@
+Tue Nov 11 16:36:34 UTC 2025 - Dirk Müller <[email protected]>
+
+- update to 0.3.0:
+  * add t-string support
+  * bugfixes
+
+-------------------------------------------------------------------

Old:
----
  pytokens-0.2.0.tar.gz

New:
----
  pytokens-0.3.0.tar.gz
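
The headline change in 0.3.0 is tokenizer support for template strings
(t-strings), the literal type that PEP 750 introduces in Python 3.14. A
minimal sketch of the new behaviour, assuming the top-level tokenize()
entry point that the package's own test suite uses:

    from pytokens import tokenize

    # A t-string goes through the same machinery as an f-string, but is
    # emitted with the new tstring_start / tstring_middle / tstring_end
    # token types (see the tokenizer diff below).
    for token in tokenize('t"hello {name}"'):
        print(token)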

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pytokens.spec ++++++
--- /var/tmp/diff_new_pack.hvoQi2/_old  2025-11-12 21:42:34.537136451 +0100
+++ /var/tmp/diff_new_pack.hvoQi2/_new  2025-11-12 21:42:34.541136619 +0100
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-pytokens
-Version:        0.2.0
+Version:        0.3.0
 Release:        0
 Summary:        A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons
 License:        MIT

++++++ pytokens-0.2.0.tar.gz -> pytokens-0.3.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/MANIFEST.in new/pytokens-0.3.0/MANIFEST.in
--- old/pytokens-0.2.0/MANIFEST.in      2025-10-15 10:02:14.000000000 +0200
+++ new/pytokens-0.3.0/MANIFEST.in      2025-11-05 14:35:23.000000000 +0100
@@ -1 +1 @@
-recursive-include tests *
\ No newline at end of file
+recursive-include tests *.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/PKG-INFO new/pytokens-0.3.0/PKG-INFO
--- old/pytokens-0.2.0/PKG-INFO 2025-10-15 10:02:34.391293300 +0200
+++ new/pytokens-0.3.0/PKG-INFO 2025-11-05 14:36:05.334748300 +0100
@@ -1,7 +1,7 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: pytokens
-Version: 0.2.0
-Summary: A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+Version: 0.3.0
+Summary: A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 Home-page: https://github.com/tusharsadhwani/pytokens
 Author: Tushar Sadhwani
 Author-email: [email protected]
@@ -30,10 +30,11 @@
 Requires-Dist: tox; extra == "dev"
 Requires-Dist: twine; extra == "dev"
 Requires-Dist: wheel; extra == "dev"
+Dynamic: license-file
 
 # pytokens
 
-A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 
 ## Installation
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/README.md new/pytokens-0.3.0/README.md
--- old/pytokens-0.2.0/README.md        2025-10-15 10:02:14.000000000 +0200
+++ new/pytokens-0.3.0/README.md        2025-11-05 14:35:23.000000000 +0100
@@ -1,6 +1,6 @@
 # pytokens
 
-A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 
 ## Installation
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/setup.cfg new/pytokens-0.3.0/setup.cfg
--- old/pytokens-0.2.0/setup.cfg        2025-10-15 10:02:34.391669000 +0200
+++ new/pytokens-0.3.0/setup.cfg        2025-11-05 14:36:05.335134000 +0100
@@ -1,7 +1,7 @@
 [metadata]
 name = pytokens
-version = 0.2.0
-description = A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+version = 0.3.0
+description = A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 long_description = file: README.md
 long_description_content_type = text/markdown
 url = https://github.com/tusharsadhwani/pytokens
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/src/pytokens/__init__.py new/pytokens-0.3.0/src/pytokens/__init__.py
--- old/pytokens-0.2.0/src/pytokens/__init__.py 2025-10-15 09:44:07.000000000 +0200
+++ new/pytokens-0.3.0/src/pytokens/__init__.py 2025-11-05 14:35:23.000000000 +0100
@@ -62,9 +62,13 @@
     fstring_middle = 22
     fstring_end = 23
 
-    endmarker = 24
+    tstring_start = 24
+    tstring_middle = 25
+    tstring_end = 26
 
-    errortoken = 25
+    endmarker = 27
+
+    errortoken = 28
 
     def __repr__(self) -> str:
         return f"TokenType.{self.name}"
@@ -250,6 +254,14 @@
         return False
 
     def make_token(self, tok_type: TokenType) -> Token:
+        if self.fstring_prefix is not None and "t" in self.fstring_prefix:
+            if tok_type == TokenType.fstring_start:
+                tok_type = TokenType.tstring_start
+            elif tok_type == TokenType.fstring_middle:
+                tok_type = TokenType.tstring_middle
+            elif tok_type == TokenType.fstring_end:
+                tok_type = TokenType.tstring_end
+
         token_type = (
             TokenType.op
             if self.weird_op_case
@@ -536,6 +548,7 @@
             FStringState.in_fstring_expr,
         ):
             prefix, quote = self.string_prefix_and_quotes()
+
             self.push_fstring_prefix_quote(prefix, quote)
             for _ in range(len(prefix)):
                 self.advance()
@@ -615,9 +628,10 @@
             assert self.fstring_quote is not None
             for _ in range(len(self.fstring_quote)):
                 self.advance()
+            token = self.make_token(TokenType.fstring_end)
             self.pop_fstring_quote()
             self.fstring_state.leave_fstring()
-            return self.make_token(TokenType.fstring_end)
+            return token
 
         if self.fstring_state.state == FStringState.in_fstring_expr_modifier:
             start_index = self.current_index
@@ -653,7 +667,7 @@
             return self.make_token(tok_type=TokenType.op)
 
         for char in prefix:
-            if char == "f" or char == "F":
+            if char in ("f", "F", "t", "T"):
                 return self.fstring()
 
         for _ in range(len(prefix)):
@@ -1076,6 +1090,8 @@
                     "f'",
                     'u"',
                     "u'",
+                    "t'",
+                    't"',
                     ignore_case=True,
                 )
             )
@@ -1090,6 +1106,10 @@
                     "fr'",
                     'rf"',
                     "rf'",
+                    "tr'",
+                    'tr"',
+                    "rt'",
+                    'rt"',
                     ignore_case=True,
                 )
             )
@@ -1115,7 +1135,7 @@
 def merge_fstring_tokens(token_iterator: TokenIterator) -> Iterator[Token]:
     """Turn post-Python-3.12 FSTRING-* tokens back to a single STRING token."""
     for token in token_iterator:
-        if token.type != TokenType.fstring_start:
+        if token.type not in (TokenType.fstring_start, TokenType.tstring_start):
             yield token
             continue
 
@@ -1125,9 +1145,9 @@
         fstring_starts = 1
         fstring_ends = 0
         for token in token_iterator:
-            if token.type == TokenType.fstring_start:
+            if token.type in (TokenType.fstring_start, TokenType.tstring_start):
                 fstring_starts += 1
-            if token.type == TokenType.fstring_end:
+            if token.type in (TokenType.fstring_end, TokenType.tstring_end):
                 fstring_ends += 1
 
             if fstring_starts == fstring_ends:
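
Rather than growing a separate t-string code path, 0.3.0 reuses the
existing f-string machinery end to end: make_token() remaps the fstring_*
token types to their tstring_* counterparts whenever the string prefix
contains "t", and merge_fstring_tokens() now counts t-string starts and
ends alongside the f-string ones so that nested literals still pair up
correctly. A simplified sketch of the remapping idea; the remap() helper
here is hypothetical, and the real make_token() additionally handles the
weird_op_case and other token-type overrides shown above:

    from typing import Optional

    from pytokens import TokenType

    _FSTRING_TO_TSTRING = {
        TokenType.fstring_start: TokenType.tstring_start,
        TokenType.fstring_middle: TokenType.tstring_middle,
        TokenType.fstring_end: TokenType.tstring_end,
    }

    def remap(tok_type: TokenType, prefix: Optional[str]) -> TokenType:
        # Mirrors the check in make_token(): any prefix containing "t"
        # (t"...", tr"...", rt"...") switches to the t-string tokens.
        if prefix is not None and "t" in prefix:
            return _FSTRING_TO_TSTRING.get(tok_type, tok_type)
        return tok_type
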
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/src/pytokens.egg-info/PKG-INFO new/pytokens-0.3.0/src/pytokens.egg-info/PKG-INFO
--- old/pytokens-0.2.0/src/pytokens.egg-info/PKG-INFO   2025-10-15 10:02:34.000000000 +0200
+++ new/pytokens-0.3.0/src/pytokens.egg-info/PKG-INFO   2025-11-05 14:36:05.000000000 +0100
@@ -1,7 +1,7 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: pytokens
-Version: 0.2.0
-Summary: A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+Version: 0.3.0
+Summary: A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 Home-page: https://github.com/tusharsadhwani/pytokens
 Author: Tushar Sadhwani
 Author-email: [email protected]
@@ -30,10 +30,11 @@
 Requires-Dist: tox; extra == "dev"
 Requires-Dist: twine; extra == "dev"
 Requires-Dist: wheel; extra == "dev"
+Dynamic: license-file
 
 # pytokens
 
-A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons.
 
 ## Installation
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/src/pytokens.egg-info/SOURCES.txt new/pytokens-0.3.0/src/pytokens.egg-info/SOURCES.txt
--- old/pytokens-0.2.0/src/pytokens.egg-info/SOURCES.txt        2025-10-15 10:02:34.000000000 +0200
+++ new/pytokens-0.3.0/src/pytokens.egg-info/SOURCES.txt        2025-11-05 14:36:05.000000000 +0100
@@ -13,4 +13,5 @@
 src/pytokens.egg-info/requires.txt
 src/pytokens.egg-info/top_level.txt
 tests/pytokens_test.py
-tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc
\ No newline at end of file
+tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc
+tests/__pycache__/pytokens_test.cpython-314-pytest-8.4.2.pyc
\ No newline at end of file
Binary files old/pytokens-0.2.0/tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc and new/pytokens-0.3.0/tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc differ
Binary files old/pytokens-0.2.0/tests/__pycache__/pytokens_test.cpython-314-pytest-8.4.2.pyc and new/pytokens-0.3.0/tests/__pycache__/pytokens_test.cpython-314-pytest-8.4.2.pyc differ
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.2.0/tests/pytokens_test.py new/pytokens-0.3.0/tests/pytokens_test.py
--- old/pytokens-0.2.0/tests/pytokens_test.py   2025-10-15 10:02:14.000000000 +0200
+++ new/pytokens-0.3.0/tests/pytokens_test.py   2025-11-05 14:35:23.000000000 +0100
@@ -81,3 +81,29 @@
         Token(T.newline, 3, 4, start_line=2, start_col=2, end_line=2, end_col=3),
         Token(T.endmarker, 4, 4, start_line=3, start_col=0, end_line=3, end_col=0),
     ]
+
+
+def test_nested_f_tstrings() -> None:
+    source = '''t"foo {f'bar'} baz"'''
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.tstring_start, 0, 2, start_line=1, start_col=0, end_line=1, end_col=2),
+        Token(T.tstring_middle, 2, 6, start_line=1, start_col=2, end_line=1, end_col=6),
+        Token(T.lbrace, 6, 7, start_line=1, start_col=6, end_line=1, end_col=7),
+        Token(T.fstring_start, 7, 9, start_line=1, start_col=7, end_line=1, end_col=9),
+        Token(
+            T.fstring_middle, 9, 12, start_line=1, start_col=9, end_line=1, end_col=12
+        ),
+        Token(
+            T.fstring_end, 12, 13, start_line=1, start_col=12, end_line=1, end_col=13
+        ),
+        Token(T.rbrace, 13, 14, start_line=1, start_col=13, end_line=1, end_col=14),
+        Token(
+            T.tstring_middle, 14, 18, start_line=1, start_col=14, end_line=1, end_col=18
+        ),
+        Token(
+            T.tstring_end, 18, 19, start_line=1, start_col=18, end_line=1, end_col=19
+        ),
+        Token(T.newline, 19, 20, start_line=1, start_col=19, end_line=1, end_col=20),
+        Token(T.endmarker, 20, 20, start_line=2, start_col=0, end_line=2, end_col=0),
+    ]
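
The new test exercises the trickiest case: an f-string nested inside a
t-string, which interleaves both token families and must pair up starts
and ends across the two kinds. For consumers that want pre-Python-3.12
style output, the extended merge_fstring_tokens() folds such a run back
into a single string token. A hedged sketch, on the assumption that
tokenize() returns the TokenIterator that merge_fstring_tokens() accepts:

    from pytokens import merge_fstring_tokens, tokenize

    source = '''t"foo {f'bar'} baz"'''
    # The nine string-related tokens from the test above collapse into
    # one merged token; newline and endmarker stay separate.
    merged = list(merge_fstring_tokens(tokenize(source)))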
