Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-asttokens for
openSUSE:Factory checked in at 2023-09-28 00:24:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-asttokens (Old)
and /work/SRC/openSUSE:Factory/.python-asttokens.new.23327 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-asttokens"
Thu Sep 28 00:24:37 2023 rev:9 rq:1113542 version:2.4.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-asttokens/python-asttokens.changes
2023-04-24 22:30:51.751385553 +0200
+++
/work/SRC/openSUSE:Factory/.python-asttokens.new.23327/python-asttokens.changes
2023-09-28 00:44:01.528960550 +0200
@@ -1,0 +2,6 @@
+Mon Sep 18 20:07:19 UTC 2023 - Dirk Müller <[email protected]>
+
+- update to 2.4.0:
+ * no upstream changelog available
+
+-------------------------------------------------------------------
Old:
----
asttokens-2.2.1.tar.gz
New:
----
asttokens-2.4.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-asttokens.spec ++++++
--- /var/tmp/diff_new_pack.KGbBvK/_old 2023-09-28 00:44:03.293024719 +0200
+++ /var/tmp/diff_new_pack.KGbBvK/_new 2023-09-28 00:44:03.305025155 +0200
@@ -22,7 +22,7 @@
%define skip_python36 1
%{?sle15_python_module_pythons}
Name: python-asttokens
-Version: 2.2.1
+Version: 2.4.0
Release: 0
Summary: Annotate AST trees with source code positions
License: Apache-2.0
++++++ asttokens-2.2.1.tar.gz -> asttokens-2.4.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/.github/workflows/build-and-test.yml
new/asttokens-2.4.0/.github/workflows/build-and-test.yml
--- old/asttokens-2.2.1/.github/workflows/build-and-test.yml 2022-12-05
11:33:06.000000000 +0100
+++ new/asttokens-2.4.0/.github/workflows/build-and-test.yml 2023-09-02
13:49:39.000000000 +0200
@@ -7,7 +7,7 @@
workflow_dispatch:
jobs:
test:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
@@ -20,6 +20,7 @@
- 3.9
- '3.10'
- 3.11
+ - 3.12.0-rc.1
# As per
https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#pypy
list of versions
- pypy-2.7
- pypy-3.6
@@ -30,13 +31,25 @@
COVERALLS_PARALLEL: true
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ if: matrix.python-version != '2.7'
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
+ - name: Set up Python 2.7
+ if: matrix.python-version == '2.7'
+ run: |
+ sudo apt-get update
+ sudo apt-get install python2.7 -y
+ # Get everything to use this new Python as the default.
+ curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py
+ sudo python2.7 get-pip.py
+ sudo ln -sf /usr/bin/pip2 /usr/bin/pip
+ sudo ln -sf /usr/bin/python2.7 /usr/bin/python
+
- name: Install dependencies
run: |
pip install --upgrade coveralls pytest setuptools setuptools_scm
pep517
@@ -68,10 +81,10 @@
mypy-py2:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python 3.9
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: 3.9
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/.readthedocs.yaml
new/asttokens-2.4.0/.readthedocs.yaml
--- old/asttokens-2.2.1/.readthedocs.yaml 1970-01-01 01:00:00.000000000
+0100
+++ new/asttokens-2.4.0/.readthedocs.yaml 2023-09-02 13:49:39.000000000
+0200
@@ -0,0 +1,23 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ configuration: docs/conf.py
+
+# We recommend specifying your dependencies to enable reproducible builds:
+# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+python:
+ install:
+ - requirements: docs/requirements.txt
+ - method: pip
+ path: .
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/PKG-INFO new/asttokens-2.4.0/PKG-INFO
--- old/asttokens-2.2.1/PKG-INFO 2022-12-05 11:34:12.659172500 +0100
+++ new/asttokens-2.4.0/PKG-INFO 2023-09-04 18:54:45.001366600 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: asttokens
-Version: 2.2.1
+Version: 2.4.0
Summary: Annotate AST trees with source code positions
Home-page: https://github.com/gristlabs/asttokens
Author: Dmitry Sagalovskiy, Grist Labs
@@ -26,6 +26,7 @@
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Provides-Extra: test
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens/asttokens.py
new/asttokens-2.4.0/asttokens/asttokens.py
--- old/asttokens-2.2.1/asttokens/asttokens.py 2022-10-29 12:14:52.000000000
+0200
+++ new/asttokens-2.4.0/asttokens/asttokens.py 2023-09-02 13:49:39.000000000
+0200
@@ -18,13 +18,16 @@
import sys
import token
from ast import Module
-from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast,
TYPE_CHECKING, Type
+from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast,
TYPE_CHECKING
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from .line_numbers import LineNumbers
-from .util import Token, match_token, is_non_coding_token,
patched_generate_tokens, last_stmt, annotate_fstring_nodes, generate_tokens
+from .util import (
+ Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
+ annotate_fstring_nodes, generate_tokens, is_module, is_stmt
+)
if TYPE_CHECKING: # pragma: no cover
from .util import AstNode, TokenInfo
@@ -58,7 +61,7 @@
This means that if ``padded`` is True, the start position will be adjusted
to include
leading whitespace if ``node`` is a multiline statement.
"""
- raise NotImplementedError
+ raise NotImplementedError # pragma: no cover
def get_text_range(self, node, padded=True):
# type: (AstNode, bool) -> Tuple[int, int]
@@ -135,7 +138,7 @@
``tree`` arguments are set, but may be used manually with a separate AST
or Astroid tree.
"""
# The hard work of this class is done by MarkTokens
- from .mark_tokens import MarkTokens # to avoid import loops
+ from .mark_tokens import MarkTokens # to avoid import loops
MarkTokens(self).visit_tree(root_node)
def _translate_tokens(self, original_tokens):
@@ -228,7 +231,7 @@
"""
Looks for the first token, starting at start_token, that matches tok_type
and, if given, the
token string. Searches backwards if reverse is True. Returns ENDMARKER
token if not found (you
- can check it with `token.ISEOF(t.type)`.
+ can check it with `token.ISEOF(t.type)`).
"""
t = start_token
advance = self.prev_token if reverse else self.next_token
@@ -289,8 +292,6 @@
It also (sometimes) supports nodes inside f-strings, which ``ASTTokens``
doesn't.
- Astroid trees are not supported at all and will raise an error.
-
Some node types and/or Python versions are not supported.
In these cases the ``get_text*`` methods will fall back to using
``ASTTokens``
which incurs the usual setup cost the first time.
@@ -301,9 +302,6 @@
# FIXME: Strictly, the type of source_text is one of the six string types,
but hard to specify with mypy given
#
https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
- if not isinstance(tree, (ast.AST, type(None))):
- raise NotImplementedError('ASTText only supports AST trees')
-
super(ASTText, self).__init__(source_text, filename)
self._tree = tree
@@ -332,26 +330,31 @@
return self._asttokens
def _get_text_positions_tokenless(self, node, padded):
- # type: (ast.AST, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
+ # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
"""
Version of ``get_text_positions()`` that doesn't use tokens.
"""
- if sys.version_info[:2] < (3, 8):
+ if sys.version_info[:2] < (3, 8): # pragma: no cover
+    # This is just for mypy
raise AssertionError("This method should only be called internally after
checking supports_tokenless()")
- if isinstance(node, ast.Module):
+ if is_module(node):
# Modules don't have position info, so just return the range of the
whole text.
# The token-using method does something different, but its behavior
seems weird and inconsistent.
# For example, in a file with only comments, it only returns the first
line.
# It's hard to imagine a case when this matters.
return (1, 0), self._line_numbers.offset_to_line(len(self._text))
- if not hasattr(node, 'lineno'):
+ if getattr(node, 'lineno', None) is None:
return (1, 0), (1, 0)
assert node # tell mypy that node is not None, which we allowed up to
here for compatibility
decorators = getattr(node, 'decorator_list', [])
+ if not decorators:
+ # Astroid uses node.decorators.nodes instead of node.decorator_list.
+ decorators_node = getattr(node, 'decorators', None)
+ decorators = getattr(decorators_node, 'nodes', [])
if decorators:
# Function/Class definition nodes are marked by AST as starting at
def/class,
# not the first decorator. This doesn't match the token-using behavior,
@@ -360,18 +363,35 @@
else:
start_node = node
- if padded and last_stmt(node).lineno != node.lineno:
- # Include leading indentation for multiline statements.
+ start_lineno = start_node.lineno
+ end_node = last_stmt(node)
+
+ # Include leading indentation for multiline statements.
+ # This doesn't mean simple statements that happen to be on multiple lines,
+ # but compound statements where inner indentation matters.
+ # So we don't just compare node.lineno and node.end_lineno,
+ # we check for a contained statement starting on a different line.
+ if padded and (
+ start_lineno != end_node.lineno
+ or (
+ # Astroid docstrings aren't treated as separate statements.
+ # So to handle function/class definitions with a docstring but no
other body,
+ # we just check that the node is a statement with a docstring
+ # and spanning multiple lines in the simple, literal sense.
+ start_lineno != node.end_lineno
+ and getattr(node, "doc_node", None)
+ and is_stmt(node)
+ )
+ ):
start_col_offset = 0
else:
- start_col_offset = self._line_numbers.from_utf8_col(start_node.lineno,
start_node.col_offset)
+ start_col_offset = self._line_numbers.from_utf8_col(start_lineno,
start_node.col_offset)
- start = (start_node.lineno, start_col_offset)
+ start = (start_lineno, start_col_offset)
# To match the token-using behaviour, we exclude trailing semicolons and
comments.
# This means that for blocks containing multiple statements, we have to
use the last one
# instead of the actual node for end_lineno and end_col_offset.
- end_node = last_stmt(node)
end_lineno = cast(int, end_node.end_lineno)
end_col_offset = cast(int, end_node.end_col_offset)
end_col_offset = self._line_numbers.from_utf8_col(end_lineno,
end_col_offset)
@@ -401,19 +421,15 @@
# Node types that _get_text_positions_tokenless doesn't support. Only relevant
for Python 3.8+.
-_unsupported_tokenless_types = () # type: Tuple[Type[ast.AST], ...]
+_unsupported_tokenless_types = () # type: Tuple[str, ...]
if sys.version_info[:2] >= (3, 8):
- _unsupported_tokenless_types += (
- # no lineno
- ast.arguments, ast.withitem,
- )
+ # no lineno
+ _unsupported_tokenless_types += ("arguments", "Arguments", "withitem")
if sys.version_info[:2] == (3, 8):
- _unsupported_tokenless_types += (
- # _get_text_positions_tokenless works incorrectly for these types due to
bugs in Python 3.8.
- ast.arg, ast.Starred,
- # no lineno in 3.8
- ast.Slice, ast.ExtSlice, ast.Index, ast.keyword,
- )
+ # _get_text_positions_tokenless works incorrectly for these types due to
bugs in Python 3.8.
+ _unsupported_tokenless_types += ("arg", "Starred")
+ # no lineno in 3.8
+ _unsupported_tokenless_types += ("Slice", "ExtSlice", "Index", "keyword")
def supports_tokenless(node=None):
@@ -427,8 +443,10 @@
- Python 3.7 and earlier
- PyPy
- - Astroid nodes (``get_text*`` methods of ``ASTText`` will raise an error)
- - ``ast.arguments`` and ``ast.withitem``
+ - ``ast.arguments`` / ``astroid.Arguments``
+ - ``ast.withitem``
+ - ``astroid.Comprehension``
+ - ``astroid.AssignName`` inside ``astroid.Arguments`` or
``astroid.ExceptHandler``
- The following nodes in Python 3.8 only:
- ``ast.arg``
- ``ast.Starred``
@@ -438,8 +456,16 @@
- ``ast.keyword``
"""
return (
- isinstance(node, (ast.AST, type(None)))
- and not isinstance(node, _unsupported_tokenless_types)
+ type(node).__name__ not in _unsupported_tokenless_types
+ and not (
+ # astroid nodes
+ not isinstance(node, ast.AST) and node is not None and (
+ (
+ type(node).__name__ == "AssignName"
+ and type(node.parent).__name__ in ("Arguments", "ExceptHandler")
+ )
+ )
+ )
and sys.version_info[:2] >= (3, 8)
and 'pypy' not in sys.version.lower()
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens/mark_tokens.py
new/asttokens-2.4.0/asttokens/mark_tokens.py
--- old/asttokens-2.2.1/asttokens/mark_tokens.py 2022-12-05
11:33:06.000000000 +0100
+++ new/asttokens-2.4.0/asttokens/mark_tokens.py 2023-09-02
13:49:39.000000000 +0200
@@ -361,7 +361,34 @@
last_token, # type: util.Token
):
# type: (...) -> Tuple[util.Token, util.Token]
- return self.handle_str(first_token, last_token)
+ if sys.version_info < (3, 12):
+ # Older versions don't tokenize the contents of f-strings
+ return self.handle_str(first_token, last_token)
+
+ last = first_token
+ while True:
+ if util.match_token(last, getattr(token, "FSTRING_START")):
+ # Python 3.12+ has tokens for the start (e.g. `f"`) and end (`"`)
+ # of the f-string. We can't just look for the next FSTRING_END
+ # because f-strings can be nested, e.g. f"{f'{x}'}", so we need
+ # to treat this like matching balanced parentheses.
+ count = 1
+ while count > 0:
+ last = self._code.next_token(last)
+ # mypy complains about token.FSTRING_START and token.FSTRING_END.
+ if util.match_token(last, getattr(token, "FSTRING_START")):
+ count += 1
+ elif util.match_token(last, getattr(token, "FSTRING_END")):
+ count -= 1
+ last_token = last
+ last = self._code.next_token(last_token)
+ elif util.match_token(last, token.STRING):
+ # Similar to handle_str, we also need to handle adjacent strings.
+ last_token = last
+ last = self._code.next_token(last_token)
+ else:
+ break
+ return (first_token, last_token)
def visit_bytes(self, node, first_token, last_token):
# type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens/util.py
new/asttokens-2.4.0/asttokens/util.py
--- old/asttokens-2.2.1/asttokens/util.py 2022-12-05 11:33:06.000000000
+0100
+++ new/asttokens-2.4.0/asttokens/util.py 2023-09-04 18:53:58.000000000
+0200
@@ -133,10 +133,9 @@
return iter_children_astroid if hasattr(node, 'get_children') else
iter_children_ast
-def iter_children_astroid(node):
- # type: (NodeNG) -> Union[Iterator, List]
- # Don't attempt to process children of JoinedStr nodes, which we can't fully
handle yet.
- if is_joined_str(node):
+def iter_children_astroid(node, include_joined_str=False):
+ # type: (NodeNG, bool) -> Union[Iterator, List]
+ if not include_joined_str and is_joined_str(node):
return []
return node.get_children()
@@ -145,10 +144,10 @@
SINGLETONS = {c for n, c in iteritems(ast.__dict__) if isinstance(c, type) and
issubclass(c, (ast.expr_context, ast.boolop, ast.operator,
ast.unaryop, ast.cmpop))}
-def iter_children_ast(node):
- # type: (AST) -> Iterator[Union[AST, expr]]
- # Don't attempt to process children of JoinedStr nodes, which we can't fully
handle yet.
- if is_joined_str(node):
+
+def iter_children_ast(node, include_joined_str=False):
+ # type: (AST, bool) -> Iterator[Union[AST, expr]]
+ if not include_joined_str and is_joined_str(node):
return
if isinstance(node, ast.Dict):
@@ -274,15 +273,17 @@
return ret
-
-def walk(node):
- # type: (AST) -> Iterator[Union[Module, AstNode]]
+def walk(node, include_joined_str=False):
+ # type: (AST, bool) -> Iterator[Union[Module, AstNode]]
"""
Recursively yield all descendant nodes in the tree starting at ``node``
(including ``node``
itself), using depth-first pre-order traversal (yielding parents before their
children).
This is similar to ``ast.walk()``, but with a different order, and it works
for both ``ast`` and
``astroid`` trees. Also, as ``iter_children()``, it skips singleton nodes
generated by ``ast``.
+
+ By default, ``JoinedStr`` (f-string) nodes and their contents are skipped
+ because they previously couldn't be handled. Set ``include_joined_str`` to
True to include them.
"""
iter_children = iter_children_func(node)
done = set()
@@ -297,7 +298,7 @@
# Insert all children in reverse order (so that first child ends up on top
of the stack).
# This is faster than building a list and reversing it.
ins = len(stack)
- for c in iter_children(current):
+ for c in iter_children(current, include_joined_str):
stack.insert(ins, c)
@@ -397,8 +398,15 @@
Otherwise, just return the node.
"""
child_stmts = [
- child for child in ast.iter_child_nodes(node)
- if isinstance(child, (ast.stmt, ast.excepthandler, getattr(ast,
"match_case", ())))
+ child for child in iter_children_func(node)(node)
+ if is_stmt(child) or type(child).__name__ in (
+ "excepthandler",
+ "ExceptHandler",
+ "match_case",
+ "MatchCase",
+ "TryExcept",
+ "TryFinally",
+ )
]
if child_stmts:
return last_stmt(child_stmts[-1])
@@ -418,12 +426,17 @@
Specifically this checks:
- Values with a format spec or conversion
- Repeated (i.e. identical-looking) expressions
- - Multiline f-strings implicitly concatenated.
+ - f-strings implicitly concatenated over multiple lines.
+ - Multiline, triple-quoted f-strings.
"""
source = """(
f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
f"{x + y + z} {x} {y} {z} {z} {z!a} {z:z}"
+ f'''
+ {s} {t}
+ {u} {v}
+ '''
)"""
tree = ast.parse(source)
name_nodes = [node for node in ast.walk(tree) if isinstance(node,
ast.Name)]
@@ -441,7 +454,11 @@
Add a special attribute `_broken_positions` to nodes inside f-strings
if the lineno/col_offset cannot be trusted.
"""
- for joinedstr in walk(tree):
+ if sys.version_info >= (3, 12):
+ # f-strings were weirdly implemented until
https://peps.python.org/pep-0701/
+ # In Python 3.12, inner nodes have sensible positions.
+ return
+ for joinedstr in walk(tree, include_joined_str=True):
if not isinstance(joinedstr, ast.JoinedStr):
continue
for part in joinedstr.values:
@@ -456,10 +473,7 @@
if part.format_spec: # this is another JoinedStr
# Again, the standard positions span the full f-string.
setattr(part.format_spec, '_broken_positions', True)
- # Recursively handle this inner JoinedStr in the same way.
- # While this is usually automatic for other nodes,
- # the children of f-strings are explicitly excluded in
iter_children_ast.
- annotate_fstring_nodes(part.format_spec)
+
else:
def fstring_positions_work():
# type: () -> bool
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens/version.py
new/asttokens-2.4.0/asttokens/version.py
--- old/asttokens-2.2.1/asttokens/version.py 2022-12-05 11:34:12.000000000
+0100
+++ new/asttokens-2.4.0/asttokens/version.py 2023-09-04 18:54:44.000000000
+0200
@@ -1 +1 @@
-__version__ = "2.2.1"
+__version__ = "2.4.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens.egg-info/PKG-INFO
new/asttokens-2.4.0/asttokens.egg-info/PKG-INFO
--- old/asttokens-2.2.1/asttokens.egg-info/PKG-INFO 2022-12-05
11:34:12.000000000 +0100
+++ new/asttokens-2.4.0/asttokens.egg-info/PKG-INFO 2023-09-04
18:54:44.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: asttokens
-Version: 2.2.1
+Version: 2.4.0
Summary: Annotate AST trees with source code positions
Home-page: https://github.com/gristlabs/asttokens
Author: Dmitry Sagalovskiy, Grist Labs
@@ -26,6 +26,7 @@
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Provides-Extra: test
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens.egg-info/SOURCES.txt
new/asttokens-2.4.0/asttokens.egg-info/SOURCES.txt
--- old/asttokens-2.2.1/asttokens.egg-info/SOURCES.txt 2022-12-05
11:34:12.000000000 +0100
+++ new/asttokens-2.4.0/asttokens.egg-info/SOURCES.txt 2023-09-04
18:54:44.000000000 +0200
@@ -1,5 +1,6 @@
.gitignore
.pylintrc
+.readthedocs.yaml
LICENSE
MANIFEST.in
Makefile
@@ -25,8 +26,9 @@
docs/Makefile
docs/api-index.rst
docs/conf.py
-docs/docs_requirements.txt
docs/index.rst
+docs/requirements.in
+docs/requirements.txt
docs/user-guide.rst
tests/__init__.py
tests/context.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/asttokens.egg-info/requires.txt
new/asttokens-2.4.0/asttokens.egg-info/requires.txt
--- old/asttokens-2.2.1/asttokens.egg-info/requires.txt 2022-12-05
11:34:12.000000000 +0100
+++ new/asttokens-2.4.0/asttokens.egg-info/requires.txt 2023-09-04
18:54:44.000000000 +0200
@@ -1,4 +1,4 @@
-six
+six>=1.12.0
[:python_version < "3.5"]
typing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/api-index.rst
new/asttokens-2.4.0/docs/api-index.rst
--- old/asttokens-2.2.1/docs/api-index.rst 2021-02-25 10:21:33.000000000
+0100
+++ new/asttokens-2.4.0/docs/api-index.rst 2023-09-02 13:49:39.000000000
+0200
@@ -8,6 +8,11 @@
.. autoclass:: asttokens.ASTTokens
:members:
+ASTText
+---------
+.. autoclass:: asttokens.ASTText
+ :members:
+
LineNumbers
-----------
.. autoclass:: asttokens.LineNumbers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/conf.py
new/asttokens-2.4.0/docs/conf.py
--- old/asttokens-2.2.1/docs/conf.py 2021-02-25 10:21:33.000000000 +0100
+++ new/asttokens-2.4.0/docs/conf.py 2023-09-02 13:49:39.000000000 +0200
@@ -1,157 +1,30 @@
-# -*- coding: utf-8 -*-
+# Configuration file for the Sphinx documentation builder.
#
-# asttokens documentation build configuration file, created by
-# sphinx-quickstart on Sat Dec 10 13:00:48 2016.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+# -- Project information -----------------------------------------------------
+#
https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
-# -- General configuration ------------------------------------------------
+project = 'asttokens'
+copyright = '2023, Grist Labs'
+author = 'Grist Labs'
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
+# -- General configuration ---------------------------------------------------
+#
https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.viewcode',
+ 'sphinx_rtd_theme',
+]
-# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'asttokens'
-copyright = u'2016, Grist Labs'
-author = u'Grist Labs'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = u'1.0'
-# The full version, including alpha/beta/rc tags.
-release = u'1.0.0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = 'default'
-
-html_sidebars = { '**': ['globaltoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'] }
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#
-# html_theme_options = {}
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# -- Options for HTMLHelp output ------------------------------------------
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'asttokensdoc'
-
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'asttokens.tex', u'asttokens Documentation',
- u'Grist Labs', 'manual'),
-]
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'asttokens', u'asttokens Documentation',
- [author], 1)
-]
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (master_doc, 'asttokens', u'asttokens Documentation',
- author, 'asttokens', 'One line description of project.',
- 'Miscellaneous'),
-]
+# -- Options for HTML output -------------------------------------------------
+#
https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+html_theme = "sphinx_rtd_theme"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/docs_requirements.txt
new/asttokens-2.4.0/docs/docs_requirements.txt
--- old/asttokens-2.2.1/docs/docs_requirements.txt 2021-02-25
10:21:33.000000000 +0100
+++ new/asttokens-2.4.0/docs/docs_requirements.txt 1970-01-01
01:00:00.000000000 +0100
@@ -1,4 +0,0 @@
-Pygments
-Sphinx
-docutils
-sphinx-autobuild
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/index.rst
new/asttokens-2.4.0/docs/index.rst
--- old/asttokens-2.2.1/docs/index.rst 2021-02-25 10:21:33.000000000 +0100
+++ new/asttokens-2.4.0/docs/index.rst 2023-09-02 13:49:39.000000000 +0200
@@ -1,5 +1,5 @@
.. asttokens documentation master file, created by
- sphinx-quickstart on Sat Dec 10 13:00:48 2016.
+ sphinx-quickstart on Mon Aug 7 11:16:41 2023.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
@@ -11,14 +11,14 @@
transformations.
.. toctree::
- :maxdepth: 2
+ :maxdepth: 2
- user-guide
- api-index
+ user-guide
+ api-index
License
-------
-Copyright 2016, Grist Labs, Inc. Licensed under the Apache License, Version
2.0.
+Copyright 2023, Grist Labs, Inc. Licensed under the Apache License, Version
2.0.
Indices and tables
==================
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/requirements.in
new/asttokens-2.4.0/docs/requirements.in
--- old/asttokens-2.2.1/docs/requirements.in 1970-01-01 01:00:00.000000000
+0100
+++ new/asttokens-2.4.0/docs/requirements.in 2023-09-02 13:49:39.000000000
+0200
@@ -0,0 +1,8 @@
+# After updating, or to pick up newer versions, run:
+# env/bin/pip-compile -o docs/requirements.txt docs/requirements.in
+# To bring venv up-to-date, run:
+# env/bin/pip-sync docs/requirements.txt
+sphinx
+sphinx_rtd_theme
+readthedocs-sphinx-search
+six
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/docs/requirements.txt
new/asttokens-2.4.0/docs/requirements.txt
--- old/asttokens-2.2.1/docs/requirements.txt 1970-01-01 01:00:00.000000000
+0100
+++ new/asttokens-2.4.0/docs/requirements.txt 2023-09-02 13:49:39.000000000
+0200
@@ -0,0 +1,61 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+# pip-compile --config=pyproject.toml --output-file=docs/requirements.txt
docs/requirements.in
+#
+alabaster==0.7.13
+ # via sphinx
+babel==2.12.1
+ # via sphinx
+certifi==2023.7.22
+ # via requests
+charset-normalizer==3.2.0
+ # via requests
+docutils==0.18.1
+ # via
+ # sphinx
+ # sphinx-rtd-theme
+idna==3.4
+ # via requests
+imagesize==1.4.1
+ # via sphinx
+jinja2==3.1.2
+ # via sphinx
+markupsafe==2.1.3
+ # via jinja2
+packaging==23.1
+ # via sphinx
+pygments==2.16.1
+ # via sphinx
+readthedocs-sphinx-search==0.3.1
+ # via -r docs/requirements.in
+requests==2.31.0
+ # via sphinx
+six==1.16.0
+ # via -r docs/requirements.in
+snowballstemmer==2.2.0
+ # via sphinx
+sphinx==6.2.1
+ # via
+ # -r docs/requirements.in
+ # sphinx-rtd-theme
+ # sphinxcontrib-jquery
+sphinx-rtd-theme==1.2.2
+ # via -r docs/requirements.in
+sphinxcontrib-applehelp==1.0.4
+ # via sphinx
+sphinxcontrib-devhelp==1.0.2
+ # via sphinx
+sphinxcontrib-htmlhelp==2.0.1
+ # via sphinx
+sphinxcontrib-jquery==4.1
+ # via sphinx-rtd-theme
+sphinxcontrib-jsmath==1.0.1
+ # via sphinx
+sphinxcontrib-qthelp==1.0.3
+ # via sphinx
+sphinxcontrib-serializinghtml==1.1.5
+ # via sphinx
+urllib3==2.0.4
+ # via requests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/setup.cfg
new/asttokens-2.4.0/setup.cfg
--- old/asttokens-2.2.1/setup.cfg 2022-12-05 11:34:12.663172500 +0100
+++ new/asttokens-2.4.0/setup.cfg 2023-09-04 18:54:45.001366600 +0200
@@ -1,7 +1,6 @@
[metadata]
license_file = LICENSE
name = asttokens
-version = 2.0.7
author = Dmitry Sagalovskiy, Grist Labs
author_email = [email protected]
license = Apache 2.0
@@ -29,13 +28,14 @@
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
+ Programming Language :: Python :: 3.12
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
[options]
packages = asttokens
install_requires =
- six
+ six >= 1.12.0
typing; python_version < "3.5"
setup_requires = setuptools>=44; setuptools_scm[toml]>=3.4.3
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/tests/test_astroid.py
new/asttokens-2.4.0/tests/test_astroid.py
--- old/asttokens-2.2.1/tests/test_astroid.py 2022-11-29 12:31:12.000000000
+0100
+++ new/asttokens-2.4.0/tests/test_astroid.py 2023-09-02 13:49:39.000000000
+0200
@@ -35,5 +35,12 @@
@staticmethod
def create_asttokens(source):
builder = astroid.builder.AstroidBuilder()
- tree = builder.string_build(source)
+ try:
+ tree = builder.string_build(source)
+ except AttributeError as e:
+ raise AstroidTreeException(str(e))
return ASTTokens(source, tree=tree)
+
+
+class AstroidTreeException(Exception):
+ pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/tests/test_mark_tokens.py
new/asttokens-2.4.0/tests/test_mark_tokens.py
--- old/asttokens-2.2.1/tests/test_mark_tokens.py 2022-12-05
11:33:06.000000000 +0100
+++ new/asttokens-2.4.0/tests/test_mark_tokens.py 2023-09-02
13:49:39.000000000 +0200
@@ -36,7 +36,7 @@
def create_mark_checker(self, source, verify=True):
atok = self.create_asttokens(source)
- checker = tools.MarkChecker(atok, self.is_astroid_test)
+ checker = tools.MarkChecker(atok)
# The last token should always be an ENDMARKER
# None of the nodes should contain that token
@@ -507,6 +507,10 @@
@deco3()
def g(x):
pass
+
+ @deco4
+ class C:
+ pass
""")
m = self.create_mark_checker(source)
# The `arguments` node has bogus positions here (and whenever there are no
arguments). We
@@ -627,6 +631,8 @@
so it only tests all modules if the environment variable
ASTTOKENS_SLOW_TESTS has been set.
"""
+ from .test_astroid import AstroidTreeException
+
modules = list(sys.modules.values())
if not os.environ.get('ASTTOKENS_SLOW_TESTS'):
modules = modules[:20]
@@ -640,7 +646,7 @@
try:
filename = inspect.getsourcefile(module)
- except TypeError:
+ except Exception: # some modules raise weird errors
continue
if not filename:
@@ -657,20 +663,21 @@
if self.is_astroid_test and (
# Astroid fails with a syntax error if a type comment is on its
own line
re.search(r'^\s*# type: ', source, re.MULTILINE)
- # Astroid can fail on this file, specifically raising an exception
at this line of code:
- # lambda node: node.name == "NamedTuple" and node.parent.name
== "typing"
- # with the error:
- # AttributeError: 'If' object has no attribute 'name'
- # See https://github.com/gristlabs/asttokens/runs/7602147792
- # I think the code that causes the problem is:
- # if sys.version_info >= (3, 11):
- # NamedTuple = typing.NamedTuple
- or filename.endswith("typing_extensions.py")
):
print('Skipping', filename)
continue
- self.create_mark_checker(source)
+ try:
+ self.create_mark_checker(source)
+ except AstroidTreeException:
+ # Astroid sometimes fails with errors like:
+ # AttributeError: 'TreeRebuilder' object has no attribute
'visit_typealias'
+ # See
https://github.com/gristlabs/asttokens/actions/runs/6015907789/job/16318767911?pr=110
+ # Should be fixed in the next astroid release:
+ #
https://github.com/pylint-dev/pylint/issues/8782#issuecomment-1669967220
+ # Note that this exception is raised before asttokens is even
involved,
+ # it's purely an astroid bug that we can safely ignore.
+ continue
if six.PY3:
def test_dict_merge(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/tests/test_tokenless.py
new/asttokens-2.4.0/tests/test_tokenless.py
--- old/asttokens-2.2.1/tests/test_tokenless.py 2022-10-29 12:14:52.000000000
+0200
+++ new/asttokens-2.4.0/tests/test_tokenless.py 2023-09-02 13:49:39.000000000
+0200
@@ -2,8 +2,6 @@
import sys
import unittest
-import astroid
-
from asttokens import ASTText, supports_tokenless
from asttokens.util import fstring_positions_work
@@ -98,7 +96,7 @@
ast_text = ast.get_source_segment(source, node, padded=padded)
atok_text = atok.get_text(node, padded=padded)
if ast_text:
- if (
+ if sys.version_info < (3, 12) and (
ast_text.startswith("f") and isinstance(node, (ast.Str,
ast.FormattedValue))
or is_fstring_format_spec(node)
or (not fstring_positions_work() and is_fstring_internal_node(node))
@@ -114,16 +112,32 @@
),
)
- def test_lazy_asttext_astroid_errors(self):
- builder = astroid.builder.AstroidBuilder()
- tree = builder.string_build(source)
- with self.assertRaises(NotImplementedError):
- ASTText(source, tree)
+ def test_nested_fstrings(self):
+ f1 = 'f"a {1+2} b {3+4} c"'
+ f2 = "f'd {" + f1 + "} e'"
+ f3 = "f'''{" + f2 + "}{" + f1 + "}'''"
+ f4 = 'f"""{' + f3 + '}"""'
+ s = 'f = ' + f4
+ atok = ASTText(s)
+ self.assertEqual(atok.get_text(atok.tree), s)
+ n4 = atok.tree.body[0].value
+ n3 = n4.values[0].value
+ n2 = n3.values[0].value
+ n1 = n2.values[1].value
+ self.assertEqual(atok.get_text(n4), f4)
+ if fstring_positions_work():
+ self.assertEqual(atok.get_text(n3), f3)
+ self.assertEqual(atok.get_text(n2), f2)
+ self.assertEqual(atok.get_text(n1), f1)
+ else:
+ self.assertEqual(atok.get_text(n3), '')
+ self.assertEqual(atok.get_text(n2), '')
+ self.assertEqual(atok.get_text(n1), '')
class TestFstringPositionsWork(unittest.TestCase):
def test_fstring_positions_work(self):
self.assertEqual(
fstring_positions_work() and supports_tokenless(),
- sys.version_info >= (3, 9, 7),
+ sys.version_info >= (3, 10, 6),
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/tests/test_util.py
new/asttokens-2.4.0/tests/test_util.py
--- old/asttokens-2.2.1/tests/test_util.py 2022-08-08 16:44:42.000000000
+0200
+++ new/asttokens-2.4.0/tests/test_util.py 2023-09-02 13:49:39.000000000
+0200
@@ -3,6 +3,7 @@
import ast
import io
+import sys
import token
import unittest
@@ -122,27 +123,38 @@
from asttokens.util import combine_tokens, patched_generate_tokens
text = "â·2=1"
- original_tokens = list(generate_tokens(io.StringIO(text).readline))[:4]
- assert original_tokens == [
- TokenInfo(ERRORTOKEN, string='â', start=(1, 0), end=(1, 1),
line='â·2=1'),
- TokenInfo(ERRORTOKEN, string='·', start=(1, 1), end=(1, 2),
line='â·2=1'),
- TokenInfo(NUMBER, string='2', start=(1, 2), end=(1, 3), line='â·2=1'),
- TokenInfo(OP, string='=', start=(1, 3), end=(1, 4), line='â·2=1'),
- ]
- assert combine_tokens(original_tokens[:1]) == [
- TokenInfo(NAME, string='â', start=(1, 0), end=(1, 1), line='â·2=1'),
- ]
- assert combine_tokens(original_tokens[:2]) == [
- TokenInfo(NAME, string='â·', start=(1, 0), end=(1, 2),
line='â·2=1'),
- ]
- assert combine_tokens(original_tokens[:3]) == [
- TokenInfo(NAME, string='â·2', start=(1, 0), end=(1, 3),
line='â·2=1'),
- ]
+ original_tokens = []
+ for tok in generate_tokens(io.StringIO(text).readline):
+ original_tokens.append(tok)
+ if tok.type == OP:
+ break
- assert list(patched_generate_tokens(iter(original_tokens))) == [
+ correct_tokens = [
TokenInfo(NAME, string='â·2', start=(1, 0), end=(1, 3),
line='â·2=1'),
TokenInfo(OP, string='=', start=(1, 3), end=(1, 4), line='â·2=1'),
]
+ if sys.version_info >= (3, 12):
+ # The tokenizing bug was fixed in 3.12, so the original tokens are
correct,
+ # rather than starting with false ERRORTOKENs.
+ assert original_tokens == correct_tokens
+ else:
+ assert original_tokens == [
+ TokenInfo(ERRORTOKEN, string='â', start=(1, 0), end=(1, 1),
line='â·2=1'),
+ TokenInfo(ERRORTOKEN, string='·', start=(1, 1), end=(1, 2),
line='â·2=1'),
+ TokenInfo(NUMBER, string='2', start=(1, 2), end=(1, 3),
line='â·2=1'),
+ TokenInfo(OP, string='=', start=(1, 3), end=(1, 4), line='â·2=1'),
+ ]
+ assert combine_tokens(original_tokens[:1]) == [
+ TokenInfo(NAME, string='â', start=(1, 0), end=(1, 1),
line='â·2=1'),
+ ]
+ assert combine_tokens(original_tokens[:2]) == [
+ TokenInfo(NAME, string='â·', start=(1, 0), end=(1, 2),
line='â·2=1'),
+ ]
+ assert combine_tokens(original_tokens[:3]) == [
+ TokenInfo(NAME, string='â·2', start=(1, 0), end=(1, 3),
line='â·2=1'),
+ ]
+
+ assert list(patched_generate_tokens(iter(original_tokens))) ==
correct_tokens
assert list(patched_generate_tokens(iter(original_tokens[:-1]))) == [
TokenInfo(NAME, string='â·2', start=(1, 0), end=(1, 3),
line='â·2=1'),
]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/asttokens-2.2.1/tests/tools.py
new/asttokens-2.4.0/tests/tools.py
--- old/asttokens-2.2.1/tests/tools.py 2022-10-30 16:08:51.000000000 +0100
+++ new/asttokens-2.4.0/tests/tools.py 2023-09-02 13:49:39.000000000 +0200
@@ -6,6 +6,8 @@
import re
import sys
+import astroid
+
from asttokens import util, supports_tokenless, ASTText
@@ -36,11 +38,10 @@
"""
Helper tool to parse and mark an AST tree, with useful methods for verifying
it.
"""
- def __init__(self, atok, is_astroid_test):
+ def __init__(self, atok):
self.atok = atok
self.all_nodes = collect_nodes_preorder(self.atok.tree)
- if not is_astroid_test:
- self.atext = ASTText(atok.text, atok.tree, atok.filename)
+ self.atext = ASTText(atok.text, atok.tree, atok.filename)
def get_nodes_at(self, line, col):
"""Returns all nodes that start with the token at the given position."""
@@ -80,6 +81,10 @@
tested_nodes = 0
for node in self.all_nodes:
+ # slices currently only get the correct tokens/text for ast, not astroid.
+ if util.is_slice(node) and test_case.is_astroid_test:
+ continue
+
text = self.atok.get_text(node)
self.check_get_text_tokenless(node, test_case, text)
@@ -89,10 +94,6 @@
util.is_module(node)):
continue
- # slices currently only get the correct tokens for ast, not astroid.
- if util.is_slice(node) and test_case.is_astroid_test:
- continue
-
# await is not allowed outside async functions below 3.7
# parsing again would give a syntax error
if 'await' in text and 'async def' not in text and sys.version_info <
(3, 7):
@@ -127,25 +128,43 @@
whether from `ASTTokens` or `ASTText`.
"""
- if test_case.is_astroid_test or not supports_tokenless():
+ if not supports_tokenless():
return
text_tokenless = self.atext.get_text(node)
if isinstance(node, ast.alias):
self._check_alias_tokenless(node, test_case, text_tokenless)
- elif isinstance(node, ast.Module):
+ elif util.is_module(node):
test_case.assertEqual(text_tokenless, self.atext._text)
+ elif isinstance(node, astroid.DictUnpack):
+ # This is a strange node that *seems* to represent just the `**` in
`{**foo}`
+ # (not `**foo` or `foo`), but text_tokenless is `foo`
+ # while `text` is just the first token of that.
+ # 'Fixing' either of these or making them match doesn't seem useful.
+ return
+ elif isinstance(node, astroid.Decorators):
+ # Another strange node where it's not worth making the two texts match
+ return
elif supports_tokenless(node):
- has_lineno = hasattr(node, 'lineno')
+ has_lineno = getattr(node, 'lineno', None) is not None
test_case.assertEqual(has_lineno, text_tokenless != '')
if has_lineno:
- test_case.assertEqual(text, text_tokenless, ast.dump(node))
+ if (
+ text != text_tokenless
+ and text_tokenless.startswith(text)
+ and text_tokenless[len(text):].strip().startswith('# type: ')
+ and test_case.is_astroid_test
+ ):
+ # astroid positions can include type comments, which we can ignore.
+ return
+ test_case.assertEqual(text, text_tokenless)
else:
# _get_text_positions_tokenless can't work with nodes without lineno.
# Double-check that such nodes are unusual.
test_case.assertFalse(util.is_stmt(node) or util.is_expr(node))
- with test_case.assertRaises(SyntaxError, msg=(text, ast.dump(node))):
- test_case.parse_snippet(text, node)
+ if not test_case.is_astroid_test:
+ with test_case.assertRaises(SyntaxError, msg=(text, ast.dump(node))):
+ test_case.parse_snippet(text, node)
def _check_alias_tokenless(self, node, test_case, text):
if sys.version_info < (3, 10):