Script 'mail_helper' called by obssrc
Hello community,

Here is the log from the commit of package python-tokenize-rt for
openSUSE:Factory checked in at 2026-03-11 20:49:42
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-tokenize-rt (Old)
 and      /work/SRC/openSUSE:Factory/.python-tokenize-rt.new.8177 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-tokenize-rt"

Wed Mar 11 20:49:42 2026 rev:8 rq:1337900 version:6.2.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-tokenize-rt/python-tokenize-rt.changes    
2025-06-04 20:28:46.649540023 +0200
+++ 
/work/SRC/openSUSE:Factory/.python-tokenize-rt.new.8177/python-tokenize-rt.changes
  2026-03-11 20:49:49.748415059 +0100
@@ -1,0 +2,6 @@
+Tue Mar 10 08:18:24 UTC 2026 - Dirk Müller <[email protected]>
+
+- update to 6.2.0:
+  * handle tstring tokens
+
+-------------------------------------------------------------------

Old:
----
  tokenize-rt-6.1.0.tar.gz

New:
----
  tokenize-rt-6.2.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-tokenize-rt.spec ++++++
--- /var/tmp/diff_new_pack.iPyvnM/_old  2026-03-11 20:49:50.232434683 +0100
+++ /var/tmp/diff_new_pack.iPyvnM/_new  2026-03-11 20:49:50.232434683 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-tokenize-rt
 #
-# Copyright (c) 2025 SUSE LLC
+# Copyright (c) 2026 SUSE LLC and contributors
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
 
 
 Name:           python-tokenize-rt
-Version:        6.1.0
+Version:        6.2.0
 Release:        0
 Summary:        A wrapper around the stdlib `tokenize` which roundtrips
 License:        MIT

++++++ tokenize-rt-6.1.0.tar.gz -> tokenize-rt-6.2.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/tokenize-rt-6.1.0/.github/workflows/main.yml 
new/tokenize-rt-6.2.0/.github/workflows/main.yml
--- old/tokenize-rt-6.1.0/.github/workflows/main.yml    2024-10-22 
02:14:53.000000000 +0200
+++ new/tokenize-rt-6.2.0/.github/workflows/main.yml    2025-05-24 
01:47:47.000000000 +0200
@@ -8,12 +8,12 @@
 
 jobs:
   main-windows:
-    uses: asottile/workflows/.github/workflows/[email protected]
+    uses: asottile/workflows/.github/workflows/[email protected]
     with:
       env: '["py39"]'
       os: windows-latest
   main-linux:
-    uses: asottile/workflows/.github/workflows/[email protected]
+    uses: asottile/workflows/.github/workflows/[email protected]
     with:
       env: '["py39", "py310", "py311", "py312", "py313"]'
       os: ubuntu-latest
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/tokenize-rt-6.1.0/.pre-commit-config.yaml 
new/tokenize-rt-6.2.0/.pre-commit-config.yaml
--- old/tokenize-rt-6.1.0/.pre-commit-config.yaml       2024-10-22 
02:14:53.000000000 +0200
+++ new/tokenize-rt-6.2.0/.pre-commit-config.yaml       2025-05-24 
01:47:47.000000000 +0200
@@ -10,11 +10,11 @@
     -   id: name-tests-test
     -   id: requirements-txt-fixer
 -   repo: https://github.com/asottile/setup-cfg-fmt
-    rev: v2.5.0
+    rev: v2.8.0
     hooks:
     -   id: setup-cfg-fmt
 -   repo: https://github.com/asottile/reorder-python-imports
-    rev: v3.13.0
+    rev: v3.14.0
     hooks:
     -   id: reorder-python-imports
         args: [--py39-plus, --add-import, 'from __future__ import annotations']
@@ -23,19 +23,19 @@
     hooks:
     -   id: add-trailing-comma
 -   repo: https://github.com/asottile/pyupgrade
-    rev: v3.17.0
+    rev: v3.19.1
     hooks:
     -   id: pyupgrade
         args: [--py39-plus]
 -   repo: https://github.com/hhatto/autopep8
-    rev: v2.3.1
+    rev: v2.3.2
     hooks:
     -   id: autopep8
 -   repo: https://github.com/PyCQA/flake8
-    rev: 7.1.1
+    rev: 7.2.0
     hooks:
     -   id: flake8
 -   repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.11.2
+    rev: v1.15.0
     hooks:
     -   id: mypy
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/tokenize-rt-6.1.0/setup.cfg 
new/tokenize-rt-6.2.0/setup.cfg
--- old/tokenize-rt-6.1.0/setup.cfg     2024-10-22 02:14:53.000000000 +0200
+++ new/tokenize-rt-6.2.0/setup.cfg     2025-05-24 01:47:47.000000000 +0200
@@ -1,6 +1,6 @@
 [metadata]
 name = tokenize_rt
-version = 6.1.0
+version = 6.2.0
 description = A wrapper around the stdlib `tokenize` which roundtrips.
 long_description = file: README.md
 long_description_content_type = text/markdown
@@ -10,7 +10,6 @@
 license = MIT
 license_files = LICENSE
 classifiers =
-    License :: OSI Approved :: MIT License
     Programming Language :: Python :: 3
     Programming Language :: Python :: 3 :: Only
     Programming Language :: Python :: Implementation :: CPython
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/tokenize-rt-6.1.0/tests/tokenize_rt_test.py 
new/tokenize-rt-6.2.0/tests/tokenize_rt_test.py
--- old/tokenize-rt-6.1.0/tests/tokenize_rt_test.py     2024-10-22 
02:14:53.000000000 +0200
+++ new/tokenize-rt-6.2.0/tests/tokenize_rt_test.py     2025-05-24 
01:47:47.000000000 +0200
@@ -202,6 +202,25 @@
         ]
 
 
[email protected](sys.version_info < (3, 14), reason='3.14+')
+def test_src_to_tokens_tstring_with_escapes():  # pragma: >=3.14 cover
+    src = 't" a {{ {b} }} c"'
+    ret = src_to_tokens(src)
+    assert ret == [
+        Token(name='TSTRING_START', src='t"', line=1, utf8_byte_offset=0),
+        Token(name='TSTRING_MIDDLE', src=' a {{', line=1, utf8_byte_offset=2), 
 # noqa: E501
+        Token(name='TSTRING_MIDDLE', src=' ', line=1, utf8_byte_offset=7),
+        Token(name='OP', src='{', line=1, utf8_byte_offset=8),
+        Token(name='NAME', src='b', line=1, utf8_byte_offset=9),
+        Token(name='OP', src='}', line=1, utf8_byte_offset=10),
+        Token(name='TSTRING_MIDDLE', src=' }}', line=1, utf8_byte_offset=11),  
# noqa: E501
+        Token(name='TSTRING_MIDDLE', src=' c', line=1, utf8_byte_offset=14),  
# noqa: E501
+        Token(name='TSTRING_END', src='"', line=1, utf8_byte_offset=16),
+        Token(name='NEWLINE', src='', line=1, utf8_byte_offset=17),
+        Token(name='ENDMARKER', src='', line=2, utf8_byte_offset=0),
+    ]
+
+
 def test_src_to_tokens_fstring_with_named_escapes():
     src = r'f" \N{SNOWMAN} "'
     ret = src_to_tokens(src)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/tokenize-rt-6.1.0/tokenize_rt.py 
new/tokenize-rt-6.2.0/tokenize_rt.py
--- old/tokenize-rt-6.1.0/tokenize_rt.py        2024-10-22 02:14:53.000000000 
+0200
+++ new/tokenize-rt-6.2.0/tokenize_rt.py        2025-05-24 01:47:47.000000000 
+0200
@@ -110,7 +110,7 @@
 
         tok_name = tokenize.tok_name[tok_type]
 
-        if tok_name == 'FSTRING_MIDDLE':  # pragma: >=3.12 cover
+        if tok_name in {'FSTRING_MIDDLE', 'TSTRING_MIDDLE'}:  # pragma: >=3.12 
cover  # noqa: E501
             if '{' in tok_text or '}' in tok_text:
                 new_tok_text = curly_escape(tok_text)
                 ecol += len(new_tok_text) - len(tok_text)

Reply via email to