Author: Carl Friedrich Bolz-Tereick <[email protected]>
Branch: py3.7
Changeset: r98489:2779449dd1d5
Date: 2020-01-08 13:04 +0100
http://bitbucket.org/pypy/pypy/changeset/2779449dd1d5/
Log: merge py3.6
diff --git a/lib-python/3/asyncio/compat.py b/lib-python/3/asyncio/compat.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3/asyncio/compat.py
@@ -0,0 +1,18 @@
+"""Compatibility helpers for the different Python versions."""
+
+import sys
+
+PY34 = sys.version_info >= (3, 4)
+PY35 = sys.version_info >= (3, 5)
+PY352 = sys.version_info >= (3, 5, 2)
+
+
+def flatten_list_bytes(list_of_data):
+ """Concatenate a sequence of bytes-like objects."""
+ if not PY34:
+ # On Python 3.3 and older, bytes.join() doesn't handle
+ # memoryview.
+ list_of_data = (
+ bytes(data) if isinstance(data, memoryview) else data
+ for data in list_of_data)
+ return b''.join(list_of_data)
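
An illustrative sketch (not from the changeset) of what the helper above
does, assuming the file is importable as asyncio.compat as in the 3.6
stdlib; on 3.4+ the memoryview conversion is skipped because
bytes.join() accepts buffer objects directly:

    from asyncio import compat

    # Mixed bytes / memoryview chunks are concatenated into one bytes object.
    chunks = [b'foo', memoryview(b'bar'), b'baz']
    assert compat.flatten_list_bytes(chunks) == b'foobarbaz'
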
diff --git a/lib-python/3/asyncio/test_utils.py b/lib-python/3/asyncio/test_utils.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3/asyncio/test_utils.py
@@ -0,0 +1,537 @@
+"""Utilities shared by tests."""
+
+import collections
+import contextlib
+import io
+import logging
+import os
+import re
+import socket
+import socketserver
+import sys
+import tempfile
+import threading
+import time
+import unittest
+import weakref
+
+from unittest import mock
+
+from http.server import HTTPServer
+from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
+
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+
+from . import base_events
+from . import compat
+from . import events
+from . import futures
+from . import selectors
+from . import tasks
+from .coroutines import coroutine
+from .log import logger
+from test import support
+
+
+if sys.platform == 'win32': # pragma: no cover
+ from .windows_utils import socketpair
+else:
+ from socket import socketpair # pragma: no cover
+
+
+def data_file(filename):
+ if hasattr(support, 'TEST_HOME_DIR'):
+ fullname = os.path.join(support.TEST_HOME_DIR, filename)
+ if os.path.isfile(fullname):
+ return fullname
+ fullname = os.path.join(os.path.dirname(os.__file__), 'test', filename)
+ if os.path.isfile(fullname):
+ return fullname
+ raise FileNotFoundError(filename)
+
+
+ONLYCERT = data_file('ssl_cert.pem')
+ONLYKEY = data_file('ssl_key.pem')
+
+
+def dummy_ssl_context():
+ if ssl is None:
+ return None
+ else:
+ return ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+
+
+def run_briefly(loop):
+ @coroutine
+ def once():
+ pass
+ gen = once()
+ t = loop.create_task(gen)
+ # Don't log a warning if the task is not done after run_until_complete().
+ # It occurs if the loop is stopped or if a task raises a BaseException.
+ t._log_destroy_pending = False
+ try:
+ loop.run_until_complete(t)
+ finally:
+ gen.close()
+
+
+def run_until(loop, pred, timeout=30):
+ deadline = time.time() + timeout
+ while not pred():
+ if timeout is not None:
+ timeout = deadline - time.time()
+ if timeout <= 0:
+ raise futures.TimeoutError()
+ loop.run_until_complete(tasks.sleep(0.001, loop=loop))
+
+
+def run_once(loop):
+ """Legacy API to run once through the event loop.
+
+ This is the recommended pattern for test code. It will poll the
+ selector once and run all callbacks scheduled in response to I/O
+ events.
+ """
+ loop.call_soon(loop.stop)
+ loop.run_forever()
+
+
+class SilentWSGIRequestHandler(WSGIRequestHandler):
+
+ def get_stderr(self):
+ return io.StringIO()
+
+ def log_message(self, format, *args):
+ pass
+
+
+class SilentWSGIServer(WSGIServer):
+
+ request_timeout = 2
+
+ def get_request(self):
+ request, client_addr = super().get_request()
+ request.settimeout(self.request_timeout)
+ return request, client_addr
+
+ def handle_error(self, request, client_address):
+ pass
+
+
+class SSLWSGIServerMixin:
+
+ def finish_request(self, request, client_address):
+ # The relative location of our test directory (which
+ # contains the ssl key and certificate files) differs
+ # between the stdlib and stand-alone asyncio.
+ # Prefer our own if we can find it.
+ keyfile = ONLYKEY
+ certfile = ONLYCERT
+ context = ssl.SSLContext()
+ context.load_cert_chain(certfile, keyfile)
+
+ ssock = context.wrap_socket(request, server_side=True)
+ try:
+ self.RequestHandlerClass(ssock, client_address, self)
+ ssock.close()
+ except OSError:
+ # maybe socket has been closed by peer
+ pass
+
+
+class SSLWSGIServer(SSLWSGIServerMixin, SilentWSGIServer):
+ pass
+
+
+def _run_test_server(*, address, use_ssl=False, server_cls, server_ssl_cls):
+
+ def app(environ, start_response):
+ status = '200 OK'
+ headers = [('Content-type', 'text/plain')]
+ start_response(status, headers)
+ return [b'Test message']
+
+ # Run the test WSGI server in a separate thread in order not to
+ # interfere with event handling in the main thread
+ server_class = server_ssl_cls if use_ssl else server_cls
+ httpd = server_class(address, SilentWSGIRequestHandler)
+ httpd.set_app(app)
+ httpd.address = httpd.server_address
+ server_thread = threading.Thread(
+ target=lambda: httpd.serve_forever(poll_interval=0.05))
+ server_thread.start()
+ try:
+ yield httpd
+ finally:
+ httpd.shutdown()
+ httpd.server_close()
+ server_thread.join()
+
+
+if hasattr(socket, 'AF_UNIX'):
+
+ class UnixHTTPServer(socketserver.UnixStreamServer, HTTPServer):
+
+ def server_bind(self):
+ socketserver.UnixStreamServer.server_bind(self)
+ self.server_name = '127.0.0.1'
+ self.server_port = 80
+
+
+ class UnixWSGIServer(UnixHTTPServer, WSGIServer):
+
+ request_timeout = 2
+
+ def server_bind(self):
+ UnixHTTPServer.server_bind(self)
+ self.setup_environ()
+
+ def get_request(self):
+ request, client_addr = super().get_request()
+ request.settimeout(self.request_timeout)
+ # Code in the stdlib expects that get_request
+ # will return a socket and a tuple (host, port).
+ # However, this isn't true for UNIX sockets,
+ # as the second return value will be a path;
+ # hence we return some fake data sufficient
+ # to get the tests going
+ return request, ('127.0.0.1', '')
+
+
+ class SilentUnixWSGIServer(UnixWSGIServer):
+
+ def handle_error(self, request, client_address):
+ pass
+
+
+ class UnixSSLWSGIServer(SSLWSGIServerMixin, SilentUnixWSGIServer):
+ pass
+
+
+ def gen_unix_socket_path():
+ with tempfile.NamedTemporaryFile() as file:
+ return file.name
+
+
+ @contextlib.contextmanager
+ def unix_socket_path():
+ path = gen_unix_socket_path()
+ try:
+ yield path
+ finally:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+
+ @contextlib.contextmanager
+ def run_test_unix_server(*, use_ssl=False):
+ with unix_socket_path() as path:
+ yield from _run_test_server(address=path, use_ssl=use_ssl,
+ server_cls=SilentUnixWSGIServer,
+ server_ssl_cls=UnixSSLWSGIServer)
+
+
[email protected]
+def run_test_server(*, host='127.0.0.1', port=0, use_ssl=False):
+ yield from _run_test_server(address=(host, port), use_ssl=use_ssl,
+ server_cls=SilentWSGIServer,
+ server_ssl_cls=SSLWSGIServer)
+
+
+def make_test_protocol(base):
+ dct = {}
+ for name in dir(base):
+ if name.startswith('__') and name.endswith('__'):
+ # skip magic names
+ continue
+ dct[name] = MockCallback(return_value=None)
+ return type('TestProtocol', (base,) + base.__bases__, dct)()
+
+
+class TestSelector(selectors.BaseSelector):
+
+ def __init__(self):
+ self.keys = {}
+
+ def register(self, fileobj, events, data=None):
+ key = selectors.SelectorKey(fileobj, 0, events, data)
+ self.keys[fileobj] = key
+ return key
+
+ def unregister(self, fileobj):
+ return self.keys.pop(fileobj)
+
+ def select(self, timeout):
+ return []
+
+ def get_map(self):
+ return self.keys
+
+
+class TestLoop(base_events.BaseEventLoop):
+ """Loop for unittests.
+
+ It manages its own time directly.
+ If something is scheduled to be executed later, then on the next loop
+ iteration, after all ready handlers are done, the generator passed to
+ __init__ is called.
+
+ Generator should be like this:
+
+ def gen():
+ ...
+ when = yield ...
+ ... = yield time_advance
+
+ Inside the generator, the value returned by a yield expression is the
+ absolute time of the next scheduled handler; the value the generator
+ yields back is the time advance by which to move the loop's time forward.
+ """
+
+ def __init__(self, gen=None):
+ super().__init__()
+
+ if gen is None:
+ def gen():
+ yield
+ self._check_on_close = False
+ else:
+ self._check_on_close = True
+
+ self._gen = gen()
+ next(self._gen)
+ self._time = 0
+ self._clock_resolution = 1e-9
+ self._timers = []
+ self._selector = TestSelector()
+
+ self.readers = {}
+ self.writers = {}
+ self.reset_counters()
+
+ self._transports = weakref.WeakValueDictionary()
+
+ def time(self):
+ return self._time
+
+ def advance_time(self, advance):
+ """Move test time forward."""
+ if advance:
+ self._time += advance
+
+ def close(self):
+ super().close()
+ if self._check_on_close:
+ try:
+ self._gen.send(0)
+ except StopIteration:
+ pass
+ else: # pragma: no cover
+ raise AssertionError("Time generator is not finished")
+
+ def _add_reader(self, fd, callback, *args):
+ self.readers[fd] = events.Handle(callback, args, self)
+
+ def _remove_reader(self, fd):
+ self.remove_reader_count[fd] += 1
+ if fd in self.readers:
+ del self.readers[fd]
+ return True
+ else:
+ return False
+
+ def assert_reader(self, fd, callback, *args):
+ if fd not in self.readers:
+ raise AssertionError(f'fd {fd} is not registered')
+ handle = self.readers[fd]
+ if handle._callback != callback:
+ raise AssertionError(
+ f'unexpected callback: {handle._callback} != {callback}')
+ if handle._args != args:
+ raise AssertionError(
+ f'unexpected callback args: {handle._args} != {args}')
+
+ def assert_no_reader(self, fd):
+ if fd in self.readers:
+ raise AssertionError(f'fd {fd} is registered')
+
+ def _add_writer(self, fd, callback, *args):
+ self.writers[fd] = events.Handle(callback, args, self)
+
+ def _remove_writer(self, fd):
+ self.remove_writer_count[fd] += 1
+ if fd in self.writers:
+ del self.writers[fd]
+ return True
+ else:
+ return False
+
+ def assert_writer(self, fd, callback, *args):
+ assert fd in self.writers, 'fd {} is not registered'.format(fd)
+ handle = self.writers[fd]
+ assert handle._callback == callback, '{!r} != {!r}'.format(
+ handle._callback, callback)
+ assert handle._args == args, '{!r} != {!r}'.format(
+ handle._args, args)
+
+ def _ensure_fd_no_transport(self, fd):
+ try:
+ transport = self._transports[fd]
+ except KeyError:
+ pass
+ else:
+ raise RuntimeError(
+ 'File descriptor {!r} is used by transport {!r}'.format(
+ fd, transport))
+
+ def add_reader(self, fd, callback, *args):
+ """Add a reader callback."""
+ self._ensure_fd_no_transport(fd)
+ return self._add_reader(fd, callback, *args)
+
+ def remove_reader(self, fd):
+ """Remove a reader callback."""
+ self._ensure_fd_no_transport(fd)
+ return self._remove_reader(fd)
+
+ def add_writer(self, fd, callback, *args):
+ """Add a writer callback.."""
+ self._ensure_fd_no_transport(fd)
+ return self._add_writer(fd, callback, *args)
+
+ def remove_writer(self, fd):
+ """Remove a writer callback."""
+ self._ensure_fd_no_transport(fd)
+ return self._remove_writer(fd)
+
+ def reset_counters(self):
+ self.remove_reader_count = collections.defaultdict(int)
+ self.remove_writer_count = collections.defaultdict(int)
+
+ def _run_once(self):
+ super()._run_once()
+ for when in self._timers:
+ advance = self._gen.send(when)
+ self.advance_time(advance)
+ self._timers = []
+
+ def call_at(self, when, callback, *args):
+ self._timers.append(when)
+ return super().call_at(when, callback, *args)
+
+ def _process_events(self, event_list):
+ return
+
+ def _write_to_self(self):
+ pass
+
+
+def MockCallback(**kwargs):
+ return mock.Mock(spec=['__call__'], **kwargs)
+
+
+class MockPattern(str):
+ """A regex based str with a fuzzy __eq__.
+
+ Use this helper with 'mock.assert_called_with', or anywhere
+ where a regex comparison between strings is needed.
+
+ For instance:
+ mock_call.assert_called_with(MockPattern('spam.*ham'))
+ """
+ def __eq__(self, other):
+ return bool(re.search(str(self), other, re.S))
+
+
+def get_function_source(func):
+ source = events._get_function_source(func)
+ if source is None:
+ raise ValueError("unable to get the source of %r" % (func,))
+ return source
+
+
+class TestCase(unittest.TestCase):
+ @staticmethod
+ def close_loop(loop):
+ executor = loop._default_executor
+ if executor is not None:
+ executor.shutdown(wait=True)
+ loop.close()
+
+ def set_event_loop(self, loop, *, cleanup=True):
+ assert loop is not None
+ # ensure that the event loop is passed explicitly in asyncio
+ events.set_event_loop(None)
+ if cleanup:
+ self.addCleanup(self.close_loop, loop)
+
+ def new_test_loop(self, gen=None):
+ loop = TestLoop(gen)
+ self.set_event_loop(loop)
+ return loop
+
+ def unpatch_get_running_loop(self):
+ events._get_running_loop = self._get_running_loop
+
+ def setUp(self):
+ self._get_running_loop = events._get_running_loop
+ events._get_running_loop = lambda: None
+ self._thread_cleanup = support.threading_setup()
+
+ def tearDown(self):
+ self.unpatch_get_running_loop()
+
+ events.set_event_loop(None)
+
+ # Detect CPython bug #23353: ensure that yield/yield-from is not used
+ # in an except block of a generator
+ self.assertEqual(sys.exc_info(), (None, None, None))
+
+ self.doCleanups()
+ support.threading_cleanup(*self._thread_cleanup)
+ support.reap_children()
+
+ if not compat.PY34:
+ # Python 3.3 compatibility
+ def subTest(self, *args, **kwargs):
+ class EmptyCM:
+ def __enter__(self):
+ pass
+ def __exit__(self, *exc):
+ pass
+ return EmptyCM()
+
+
[email protected]
+def disable_logger():
+ """Context manager to disable asyncio logger.
+
+ For example, it can be used to ignore warnings in debug mode.
+ """
+ old_level = logger.level
+ try:
+ logger.setLevel(logging.CRITICAL+1)
+ yield
+ finally:
+ logger.setLevel(old_level)
+
+
+def mock_nonblocking_socket(proto=socket.IPPROTO_TCP, type=socket.SOCK_STREAM,
+ family=socket.AF_INET):
+ """Create a mock of a non-blocking socket."""
+ sock = mock.MagicMock(socket.socket)
+ sock.proto = proto
+ sock.type = type
+ sock.family = family
+ sock.gettimeout.return_value = 0.0
+ return sock
+
+
+def force_legacy_ssl_support():
+ return mock.patch('asyncio.sslproto._is_sslproto_available',
+ return_value=False)
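
An illustrative sketch (not from the changeset) of the TestLoop time
generator protocol described in its docstring above. It assumes
asyncio.test_utils is importable (it ships inside the asyncio package
on 3.6/3.7) and that asyncio.sleep still accepts a loop argument:

    import asyncio
    from asyncio import test_utils

    def time_gen():
        # run_until_complete() sends in the absolute time of the next
        # scheduled handler; the value yielded back is how far to
        # advance the fake clock.
        when = yield
        assert abs(when - 0.1) < 1e-6
        yield 0.1                      # jump the clock forward by 0.1s

    loop = test_utils.TestLoop(time_gen)
    # The sleep completes without real waiting: TestLoop only moves
    # its internal clock.
    loop.run_until_complete(asyncio.sleep(0.1, loop=loop))
    assert abs(loop.time() - 0.1) < 1e-6
    loop.close()   # close() verifies the time generator is exhausted
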
diff --git a/lib-python/3/idlelib/_pyclbr.py b/lib-python/3/idlelib/_pyclbr.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3/idlelib/_pyclbr.py
@@ -0,0 +1,402 @@
+# A private copy of 3.7.0a1 pyclbr for use by idlelib.browser
+"""Parse a Python module and describe its classes and functions.
+
+Parse enough of a Python file to recognize imports and class and
+function definitions, and to find out the superclasses of a class.
+
+The interface consists of a single function:
+ readmodule_ex(module, path=None)
+where module is the name of a Python module, and path is an optional
+list of directories where the module is to be searched. If present,
+path is prepended to the system search path sys.path. The return value
+is a dictionary. The keys of the dictionary are the names of the
+classes and functions defined in the module (including classes that are
+defined via the from XXX import YYY construct). The values are
+instances of classes Class and Function. One special key/value pair is
+present for packages: the key '__path__' has a list as its value which
+contains the package search path.
+
+Classes and Functions have a common superclass: _Object. Every instance
+has the following attributes:
+ module -- name of the module;
+ name -- name of the object;
+ file -- file in which the object is defined;
+ lineno -- line in the file where the object's definition starts;
+ parent -- parent of this object, if any;
+ children -- nested objects contained in this object.
+The 'children' attribute is a dictionary mapping names to objects.
+
+Instances of Function describe functions with the attributes from _Object.
+
+Instances of Class describe classes with the attributes from _Object,
+plus the following:
+ super -- list of super classes (Class instances if possible);
+ methods -- mapping of method names to beginning line numbers.
+If the name of a super class is not recognized, the corresponding
+entry in the list of super classes is not a class instance but a
+string giving the name of the super class. Since import statements
+are recognized and imported modules are scanned as well, this
+shouldn't happen often.
+"""
+
+import io
+import sys
+import importlib.util
+import tokenize
+from token import NAME, DEDENT, OP
+
+__all__ = ["readmodule", "readmodule_ex", "Class", "Function"]
+
+_modules = {} # Initialize cache of modules we've seen.
+
+
+class _Object:
+ "Informaton about Python class or function."
+ def __init__(self, module, name, file, lineno, parent):
+ self.module = module
+ self.name = name
+ self.file = file
+ self.lineno = lineno
+ self.parent = parent
+ self.children = {}
+
+ def _addchild(self, name, obj):
+ self.children[name] = obj
+
+
+class Function(_Object):
+ "Information about a Python function, including methods."
+ def __init__(self, module, name, file, lineno, parent=None):
+ _Object.__init__(self, module, name, file, lineno, parent)
+
+
+class Class(_Object):
+ "Information about a Python class."
+ def __init__(self, module, name, super, file, lineno, parent=None):
+ _Object.__init__(self, module, name, file, lineno, parent)
+ self.super = [] if super is None else super
+ self.methods = {}
+
+ def _addmethod(self, name, lineno):
+ self.methods[name] = lineno
+
+
+def _nest_function(ob, func_name, lineno):
+ "Return a Function after nesting within ob."
+ newfunc = Function(ob.module, func_name, ob.file, lineno, ob)
+ ob._addchild(func_name, newfunc)
+ if isinstance(ob, Class):
+ ob._addmethod(func_name, lineno)
+ return newfunc
+
+def _nest_class(ob, class_name, lineno, super=None):
+ "Return a Class after nesting within ob."
+ newclass = Class(ob.module, class_name, super, ob.file, lineno, ob)
+ ob._addchild(class_name, newclass)
+ return newclass
+
+def readmodule(module, path=None):
+ """Return Class objects for the top-level classes in module.
+
+ This is the original interface, before Functions were added.
+ """
+
+ res = {}
+ for key, value in _readmodule(module, path or []).items():
+ if isinstance(value, Class):
+ res[key] = value
+ return res
+
+def readmodule_ex(module, path=None):
+ """Return a dictionary with all functions and classes in module.
+
+ Search for module in PATH + sys.path.
+ If possible, include imported superclasses.
+ Do this by reading source, without importing (and executing) it.
+ """
+ return _readmodule(module, path or [])
+
+def _readmodule(module, path, inpackage=None):
+ """Do the hard work for readmodule[_ex].
+
+ If inpackage is given, it must be the dotted name of the package in
+ which we are searching for a submodule, and then PATH must be the
+ package search path; otherwise, we are searching for a top-level
+ module, and path is combined with sys.path.
+ """
+ # Compute the full module name (prepending inpackage if set).
+ if inpackage is not None:
+ fullmodule = "%s.%s" % (inpackage, module)
+ else:
+ fullmodule = module
+
+ # Check in the cache.
+ if fullmodule in _modules:
+ return _modules[fullmodule]
+
+ # Initialize the dict for this module's contents.
+ tree = {}
+
+ # Check if it is a built-in module; we don't do much for these.
+ if module in sys.builtin_module_names and inpackage is None:
+ _modules[module] = tree
+ return tree
+
+ # Check for a dotted module name.
+ i = module.rfind('.')
+ if i >= 0:
+ package = module[:i]
+ submodule = module[i+1:]
+ parent = _readmodule(package, path, inpackage)
+ if inpackage is not None:
+ package = "%s.%s" % (inpackage, package)
+ if not '__path__' in parent:
+ raise ImportError('No package named {}'.format(package))
+ return _readmodule(submodule, parent['__path__'], package)
+
+ # Search the path for the module.
+ f = None
+ if inpackage is not None:
+ search_path = path
+ else:
+ search_path = path + sys.path
+ spec = importlib.util._find_spec_from_path(fullmodule, search_path)
+ _modules[fullmodule] = tree
+ # Is module a package?
+ if spec.submodule_search_locations is not None:
+ tree['__path__'] = spec.submodule_search_locations
+ try:
+ source = spec.loader.get_source(fullmodule)
+ if source is None:
+ return tree
+ except (AttributeError, ImportError):
+ # If module is not Python source, we cannot do anything.
+ return tree
+
+ fname = spec.loader.get_filename(fullmodule)
+ return _create_tree(fullmodule, path, fname, source, tree, inpackage)
+
+
+def _create_tree(fullmodule, path, fname, source, tree, inpackage):
+ """Return the tree for a particular module.
+
+ fullmodule (full module name), inpackage+module, becomes o.module.
+ path is passed to recursive calls of _readmodule.
+ fname becomes o.file.
+ source is tokenized. Imports cause recursive calls to _readmodule.
+ tree is {} or {'__path__': <submodule search locations>}.
+ inpackage, None or string, is passed to recursive calls of _readmodule.
+
+ The effect of recursive calls is mutation of global _modules.
+ """
+ f = io.StringIO(source)
+
+ stack = [] # Initialize stack of (class, indent) pairs.
+
+ g = tokenize.generate_tokens(f.readline)
+ try:
+ for tokentype, token, start, _end, _line in g:
+ if tokentype == DEDENT:
+ lineno, thisindent = start
+ # Close previous nested classes and defs.
+ while stack and stack[-1][1] >= thisindent:
+ del stack[-1]
+ elif token == 'def':
+ lineno, thisindent = start
+ # Close previous nested classes and defs.
+ while stack and stack[-1][1] >= thisindent:
+ del stack[-1]
+ tokentype, func_name, start = next(g)[0:3]
+ if tokentype != NAME:
+ continue # Skip def with syntax error.
+ cur_func = None
+ if stack:
+ cur_obj = stack[-1][0]
+ cur_func = _nest_function(cur_obj, func_name, lineno)
+ else:
+ # It is just a function.
+ cur_func = Function(fullmodule, func_name, fname, lineno)
+ tree[func_name] = cur_func
+ stack.append((cur_func, thisindent))
+ elif token == 'class':
+ lineno, thisindent = start
+ # Close previous nested classes and defs.
+ while stack and stack[-1][1] >= thisindent:
+ del stack[-1]
+ tokentype, class_name, start = next(g)[0:3]
+ if tokentype != NAME:
+ continue # Skip class with syntax error.
+ # Parse what follows the class name.
+ tokentype, token, start = next(g)[0:3]
+ inherit = None
+ if token == '(':
+ names = [] # Initialize list of superclasses.
+ level = 1
+ super = [] # Tokens making up current superclass.
+ while True:
+ tokentype, token, start = next(g)[0:3]
+ if token in (')', ',') and level == 1:
+ n = "".join(super)
+ if n in tree:
+ # We know this super class.
+ n = tree[n]
+ else:
+ c = n.split('.')
+ if len(c) > 1:
+ # Super class form is module.class:
+ # look in module for class.
+ m = c[-2]
+ c = c[-1]
+ if m in _modules:
+ d = _modules[m]
+ if c in d:
+ n = d[c]
+ names.append(n)
+ super = []
+ if token == '(':
+ level += 1
+ elif token == ')':
+ level -= 1
+ if level == 0:
+ break
+ elif token == ',' and level == 1:
+ pass
+ # Only use NAME and OP (== dot) tokens for type name.
+ elif tokentype in (NAME, OP) and level == 1:
+ super.append(token)
+ # Expressions in the base list are not supported.
+ inherit = names
+ if stack:
+ cur_obj = stack[-1][0]
+ cur_class = _nest_class(
+ cur_obj, class_name, lineno, inherit)
+ else:
+ cur_class = Class(fullmodule, class_name, inherit,
+ fname, lineno)
+ tree[class_name] = cur_class
+ stack.append((cur_class, thisindent))
+ elif token == 'import' and start[1] == 0:
+ modules = _getnamelist(g)
+ for mod, _mod2 in modules:
+ try:
+ # Recursively read the imported module.
+ if inpackage is None:
+ _readmodule(mod, path)
+ else:
+ try:
+ _readmodule(mod, path, inpackage)
+ except ImportError:
+ _readmodule(mod, [])
+ except:
+ # If we can't find or parse the imported module,
+ # too bad -- don't die here.
+ pass
+ elif token == 'from' and start[1] == 0:
+ mod, token = _getname(g)
+ if not mod or token != "import":
+ continue
+ names = _getnamelist(g)
+ try:
+ # Recursively read the imported module.
+ d = _readmodule(mod, path, inpackage)
+ except:
+ # If we can't find or parse the imported module,
+ # too bad -- don't die here.
+ continue
+ # Add any classes that were defined in the imported module
+ # to our name space if they were mentioned in the list.
+ for n, n2 in names:
+ if n in d:
+ tree[n2 or n] = d[n]
+ elif n == '*':
+ # Don't add names that start with _.
+ for n in d:
+ if n[0] != '_':
+ tree[n] = d[n]
+ except StopIteration:
+ pass
+
+ f.close()
+ return tree
+
+
+def _getnamelist(g):
+ """Return list of (dotted-name, as-name or None) tuples for token source g.
+
+ An as-name is the name that follows 'as' in an as clause.
+ """
+ names = []
+ while True:
+ name, token = _getname(g)
+ if not name:
+ break
+ if token == 'as':
+ name2, token = _getname(g)
+ else:
+ name2 = None
+ names.append((name, name2))
+ while token != "," and "\n" not in token:
+ token = next(g)[1]
+ if token != ",":
+ break
+ return names
+
+
+def _getname(g):
+ "Return (dotted-name or None, next-token) tuple for token source g."
+ parts = []
+ tokentype, token = next(g)[0:2]
+ if tokentype != NAME and token != '*':
+ return (None, token)
+ parts.append(token)
+ while True:
+ tokentype, token = next(g)[0:2]
+ if token != '.':
+ break
+ tokentype, token = next(g)[0:2]
+ if tokentype != NAME:
+ break
+ parts.append(token)
+ return (".".join(parts), token)
+
+
+def _main():
+ "Print module output (default this file) for quick visual check."
+ import os
+ try:
+ mod = sys.argv[1]
+ except:
+ mod = __file__
+ if os.path.exists(mod):
+ path = [os.path.dirname(mod)]
+ mod = os.path.basename(mod)
+ if mod.lower().endswith(".py"):
+ mod = mod[:-3]
+ else:
+ path = []
+ tree = readmodule_ex(mod, path)
+ lineno_key = lambda a: getattr(a, 'lineno', 0)
+ objs = sorted(tree.values(), key=lineno_key, reverse=True)
+ indent_level = 2
+ while objs:
+ obj = objs.pop()
+ if isinstance(obj, list):
+ # Value is a __path__ key.
+ continue
+ if not hasattr(obj, 'indent'):
+ obj.indent = 0
+
+ if isinstance(obj, _Object):
+ new_objs = sorted(obj.children.values(),
+ key=lineno_key, reverse=True)
+ for ob in new_objs:
+ ob.indent = obj.indent + indent_level
+ objs.extend(new_objs)
+ if isinstance(obj, Class):
+ print("{}class {} {} {}"
+ .format(' ' * obj.indent, obj.name, obj.super, obj.lineno))
+ elif isinstance(obj, Function):
+ print("{}def {} {}".format(' ' * obj.indent, obj.name, obj.lineno))
+
+if __name__ == "__main__":
+ _main()
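
An illustrative sketch (not from the changeset) of the readmodule_ex()
interface documented above; 'queue' is just an arbitrary pure-Python
stdlib module picked for the example:

    from idlelib import _pyclbr

    tree = _pyclbr.readmodule_ex('queue')
    for name, obj in sorted(tree.items()):
        if isinstance(obj, _pyclbr.Class):
            # obj.super lists base classes, obj.methods maps method
            # names to their starting line numbers.
            print('class', name, obj.lineno, sorted(obj.methods))
        elif isinstance(obj, _pyclbr.Function):
            print('def', name, obj.lineno)
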
diff --git a/lib-python/3/macurl2path.py b/lib-python/3/macurl2path.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3/macurl2path.py
@@ -0,0 +1,77 @@
+"""Macintosh-specific module for conversion between pathnames and URLs.
+
+Do not import directly; use urllib instead."""
+
+import urllib.parse
+import os
+
+__all__ = ["url2pathname","pathname2url"]
+
+def url2pathname(pathname):
+ """OS-specific conversion from a relative URL of the 'file' scheme
+ to a file system path; not recommended for general use."""
+ #
+ # XXXX The .. handling should be fixed...
+ #
+ tp = urllib.parse.splittype(pathname)[0]
+ if tp and tp != 'file':
+ raise RuntimeError('Cannot convert non-local URL to pathname')
+ # Turn starting /// into /, an empty hostname means current host
+ if pathname[:3] == '///':
+ pathname = pathname[2:]
+ elif pathname[:2] == '//':
+ raise RuntimeError('Cannot convert non-local URL to pathname')
+ components = pathname.split('/')
+ # Remove . and embedded ..
+ i = 0
+ while i < len(components):
+ if components[i] == '.':
+ del components[i]
+ elif components[i] == '..' and i > 0 and \
+ components[i-1] not in ('', '..'):
+ del components[i-1:i+1]
+ i = i-1
+ elif components[i] == '' and i > 0 and components[i-1] != '':
+ del components[i]
+ else:
+ i = i+1
+ if not components[0]:
+ # Absolute unix path, don't start with colon
+ rv = ':'.join(components[1:])
+ else:
+ # relative unix path, start with colon. First replace
+ # leading .. by empty strings (giving ::file)
+ i = 0
+ while i < len(components) and components[i] == '..':
+ components[i] = ''
+ i = i + 1
+ rv = ':' + ':'.join(components)
+ # and finally unquote slashes and other funny characters
+ return urllib.parse.unquote(rv)
+
+def pathname2url(pathname):
+ """OS-specific conversion from a file system path to a relative URL
+ of the 'file' scheme; not recommended for general use."""
+ if '/' in pathname:
+ raise RuntimeError("Cannot convert pathname containing slashes")
+ components = pathname.split(':')
+ # Remove empty first and/or last component
+ if components[0] == '':
+ del components[0]
+ if components[-1] == '':
+ del components[-1]
+ # Replace empty string ('::') by .. (will result in '/../' later)
+ for i in range(len(components)):
+ if components[i] == '':
+ components[i] = '..'
+ # Truncate names longer than 31 bytes
+ components = map(_pncomp2url, components)
+
+ if os.path.isabs(pathname):
+ return '/' + '/'.join(components)
+ else:
+ return '/'.join(components)
+
+def _pncomp2url(component):
+ # We want to quote slashes
+ return urllib.parse.quote(component[:31], safe='')
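
An illustrative sketch (not from the changeset) of the two conversions,
mirroring cases exercised by the new test_macurl2path.py below:

    import macurl2path

    # URL path -> classic Mac path: '/' becomes ':', and a leading ':'
    # marks a relative path.
    assert macurl2path.url2pathname("/foo/bar/index.html") == "foo:bar:index.html"
    assert macurl2path.url2pathname("bar/index.html") == ":bar:index.html"

    # Mac path -> URL path.
    assert macurl2path.pathname2url("drive:dir:file") == "drive/dir/file"
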
diff --git a/lib-python/3/test/bisect.py b/lib-python/3/test/bisect.py
new file mode 100755
--- /dev/null
+++ b/lib-python/3/test/bisect.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+"""
+Command line tool to bisect failing CPython tests.
+
+Find the test_os test method which alters the environment:
+
+ ./python -m test.bisect --fail-env-changed test_os
+
+Find a reference leak in "test_os", write the list of failing tests into the
+"bisect" file:
+
+ ./python -m test.bisect -o bisect -R 3:3 test_os
+
+Load an existing list of tests from a file using -i option:
+
+ ./python -m test --list-cases -m FileTests test_os > tests
+ ./python -m test.bisect -i tests test_os
+"""
+
+import argparse
+import datetime
+import os.path
+import math
+import random
+import subprocess
+import sys
+import tempfile
+import time
+
+
+def write_tests(filename, tests):
+ with open(filename, "w") as fp:
+ for name in tests:
+ print(name, file=fp)
+ fp.flush()
+
+
+def write_output(filename, tests):
+ if not filename:
+ return
+ print("Writing %s tests into %s" % (len(tests), filename))
+ write_tests(filename, tests)
+ return filename
+
+
+def format_shell_args(args):
+ return ' '.join(args)
+
+
+def list_cases(args):
+ cmd = [sys.executable, '-m', 'test', '--list-cases']
+ cmd.extend(args.test_args)
+ proc = subprocess.run(cmd,
+ stdout=subprocess.PIPE,
+ universal_newlines=True)
+ exitcode = proc.returncode
+ if exitcode:
+ cmd = format_shell_args(cmd)
+ print("Failed to list tests: %s failed with exit code %s"
+ % (cmd, exitcode))
+ sys.exit(exitcode)
+ tests = proc.stdout.splitlines()
+ return tests
+
+
+def run_tests(args, tests, huntrleaks=None):
+ tmp = tempfile.mktemp()
+ try:
+ write_tests(tmp, tests)
+
+ cmd = [sys.executable, '-m', 'test', '--matchfile', tmp]
+ cmd.extend(args.test_args)
+ print("+ %s" % format_shell_args(cmd))
+ proc = subprocess.run(cmd)
+ return proc.returncode
+ finally:
+ if os.path.exists(tmp):
+ os.unlink(tmp)
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-i', '--input',
+ help='Test names produced by --list-cases written '
+ 'into a file. If not set, run --list-cases')
+ parser.add_argument('-o', '--output',
+ help='Result of the bisection')
+ parser.add_argument('-n', '--max-tests', type=int, default=1,
+ help='Maximum number of tests to stop the bisection '
+ '(default: 1)')
+ parser.add_argument('-N', '--max-iter', type=int, default=100,
+ help='Maximum number of bisection iterations '
+ '(default: 100)')
+ # FIXME: document that following arguments are test arguments
+
+ args, test_args = parser.parse_known_args()
+ args.test_args = test_args
+ return args
+
+
+def main():
+ args = parse_args()
+
+ if args.input:
+ with open(args.input) as fp:
+ tests = [line.strip() for line in fp]
+ else:
+ tests = list_cases(args)
+
+ print("Start bisection with %s tests" % len(tests))
+ print("Test arguments: %s" % format_shell_args(args.test_args))
+ print("Bisection will stop when getting %s or less tests "
+ "(-n/--max-tests option), or after %s iterations "
+ "(-N/--max-iter option)"
+ % (args.max_tests, args.max_iter))
+ output = write_output(args.output, tests)
+ print()
+
+ start_time = time.monotonic()
+ iteration = 1
+ try:
+ while len(tests) > args.max_tests and iteration <= args.max_iter:
+ ntest = len(tests)
+ ntest = max(ntest // 2, 1)
+ subtests = random.sample(tests, ntest)
+
+ print("[+] Iteration %s: run %s tests/%s"
+ % (iteration, len(subtests), len(tests)))
+ print()
+
+ exitcode = run_tests(args, subtests)
+
+ print("ran %s tests/%s" % (ntest, len(tests)))
+ print("exit", exitcode)
+ if exitcode:
+ print("Tests failed: continuing with this subtest")
+ tests = subtests
+ output = write_output(args.output, tests)
+ else:
+ print("Tests succeeded: skipping this subtest, trying a new
subset")
+ print()
+ iteration += 1
+ except KeyboardInterrupt:
+ print()
+ print("Bisection interrupted!")
+ print()
+
+ print("Tests (%s):" % len(tests))
+ for test in tests:
+ print("* %s" % test)
+ print()
+
+ if output:
+ print("Output written into %s" % output)
+
+ dt = math.ceil(time.monotonic() - start_time)
+ if len(tests) <= args.max_tests:
+ print("Bisection completed in %s iterations and %s"
+ % (iteration, datetime.timedelta(seconds=dt)))
+ sys.exit(1)
+ else:
+ print("Bisection failed after %s iterations and %s"
+ % (iteration, datetime.timedelta(seconds=dt)))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/lib-python/3/test/pystone.py b/lib-python/3/test/pystone.py
new file mode 100755
--- /dev/null
+++ b/lib-python/3/test/pystone.py
@@ -0,0 +1,277 @@
+#! /usr/bin/env python3
+
+"""
+"PYSTONE" Benchmark Program
+
+Version: Python/1.2 (corresponds to C/1.1 plus 3 Pystone fixes)
+
+Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
+
+ Translated from ADA to C by Rick Richardson.
+ Every method to preserve ADA-likeness has been used,
+ at the expense of C-ness.
+
+ Translated from C to Python by Guido van Rossum.
+
+Version History:
+
+ Version 1.1 corrects two bugs in version 1.0:
+
+ First, it leaked memory: in Proc1(), NextRecord ends
+ up having a pointer to itself. I have corrected this
+ by zapping NextRecord.PtrComp at the end of Proc1().
+
+ Second, Proc3() used the operator != to compare a
+ record to None. This is rather inefficient and not
+ true to the intention of the original benchmark (where
+ a pointer comparison to None is intended; the !=
+ operator attempts to find a method __cmp__ to do value
+ comparison of the record). Version 1.1 runs 5-10
+ percent faster than version 1.0, so benchmark figures
+ of different versions can't be compared directly.
+
+ Version 1.2 changes the division to floor division.
+
+ Under Python 3 version 1.1 would use the normal division
+ operator, resulting in some of the operations mistakenly
+ yielding floats. Version 1.2 instead uses floor division
+ making the benchmark an integer benchmark again.
+
+"""
+
+LOOPS = 50000
+
+from time import time
+
+__version__ = "1.2"
+
+[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
+
+class Record:
+
+ def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
+ IntComp = 0, StringComp = 0):
+ self.PtrComp = PtrComp
+ self.Discr = Discr
+ self.EnumComp = EnumComp
+ self.IntComp = IntComp
+ self.StringComp = StringComp
+
+ def copy(self):
+ return Record(self.PtrComp, self.Discr, self.EnumComp,
+ self.IntComp, self.StringComp)
+
+TRUE = 1
+FALSE = 0
+
+def main(loops=LOOPS):
+ benchtime, stones = pystones(loops)
+ print("Pystone(%s) time for %d passes = %g" % \
+ (__version__, loops, benchtime))
+ print("This machine benchmarks at %g pystones/second" % stones)
+
+
+def pystones(loops=LOOPS):
+ return Proc0(loops)
+
+IntGlob = 0
+BoolGlob = FALSE
+Char1Glob = '\0'
+Char2Glob = '\0'
+Array1Glob = [0]*51
+Array2Glob = [x[:] for x in [Array1Glob]*51]
+PtrGlb = None
+PtrGlbNext = None
+
+def Proc0(loops=LOOPS):
+ global IntGlob
+ global BoolGlob
+ global Char1Glob
+ global Char2Glob
+ global Array1Glob
+ global Array2Glob
+ global PtrGlb
+ global PtrGlbNext
+
+ starttime = time()
+ for i in range(loops):
+ pass
+ nulltime = time() - starttime
+
+ PtrGlbNext = Record()
+ PtrGlb = Record()
+ PtrGlb.PtrComp = PtrGlbNext
+ PtrGlb.Discr = Ident1
+ PtrGlb.EnumComp = Ident3
+ PtrGlb.IntComp = 40
+ PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
+ String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
+ Array2Glob[8][7] = 10
+
+ starttime = time()
+
+ for i in range(loops):
+ Proc5()
+ Proc4()
+ IntLoc1 = 2
+ IntLoc2 = 3
+ String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
+ EnumLoc = Ident2
+ BoolGlob = not Func2(String1Loc, String2Loc)
+ while IntLoc1 < IntLoc2:
+ IntLoc3 = 5 * IntLoc1 - IntLoc2
+ IntLoc3 = Proc7(IntLoc1, IntLoc2)
+ IntLoc1 = IntLoc1 + 1
+ Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
+ PtrGlb = Proc1(PtrGlb)
+ CharIndex = 'A'
+ while CharIndex <= Char2Glob:
+ if EnumLoc == Func1(CharIndex, 'C'):
+ EnumLoc = Proc6(Ident1)
+ CharIndex = chr(ord(CharIndex)+1)
+ IntLoc3 = IntLoc2 * IntLoc1
+ IntLoc2 = IntLoc3 // IntLoc1
+ IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
+ IntLoc1 = Proc2(IntLoc1)
+
+ benchtime = time() - starttime - nulltime
+ if benchtime == 0.0:
+ loopsPerBenchtime = 0.0
+ else:
+ loopsPerBenchtime = (loops / benchtime)
+ return benchtime, loopsPerBenchtime
+
+def Proc1(PtrParIn):
+ PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
+ PtrParIn.IntComp = 5
+ NextRecord.IntComp = PtrParIn.IntComp
+ NextRecord.PtrComp = PtrParIn.PtrComp
+ NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
+ if NextRecord.Discr == Ident1:
+ NextRecord.IntComp = 6
+ NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
+ NextRecord.PtrComp = PtrGlb.PtrComp
+ NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
+ else:
+ PtrParIn = NextRecord.copy()
+ NextRecord.PtrComp = None
+ return PtrParIn
+
+def Proc2(IntParIO):
+ IntLoc = IntParIO + 10
+ while 1:
+ if Char1Glob == 'A':
+ IntLoc = IntLoc - 1
+ IntParIO = IntLoc - IntGlob
+ EnumLoc = Ident1
+ if EnumLoc == Ident1:
+ break
+ return IntParIO
+
+def Proc3(PtrParOut):
+ global IntGlob
+
+ if PtrGlb is not None:
+ PtrParOut = PtrGlb.PtrComp
+ else:
+ IntGlob = 100
+ PtrGlb.IntComp = Proc7(10, IntGlob)
+ return PtrParOut
+
+def Proc4():
+ global Char2Glob
+
+ BoolLoc = Char1Glob == 'A'
+ BoolLoc = BoolLoc or BoolGlob
+ Char2Glob = 'B'
+
+def Proc5():
+ global Char1Glob
+ global BoolGlob
+
+ Char1Glob = 'A'
+ BoolGlob = FALSE
+
+def Proc6(EnumParIn):
+ EnumParOut = EnumParIn
+ if not Func3(EnumParIn):
+ EnumParOut = Ident4
+ if EnumParIn == Ident1:
+ EnumParOut = Ident1
+ elif EnumParIn == Ident2:
+ if IntGlob > 100:
+ EnumParOut = Ident1
+ else:
+ EnumParOut = Ident4
+ elif EnumParIn == Ident3:
+ EnumParOut = Ident2
+ elif EnumParIn == Ident4:
+ pass
+ elif EnumParIn == Ident5:
+ EnumParOut = Ident3
+ return EnumParOut
+
+def Proc7(IntParI1, IntParI2):
+ IntLoc = IntParI1 + 2
+ IntParOut = IntParI2 + IntLoc
+ return IntParOut
+
+def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
+ global IntGlob
+
+ IntLoc = IntParI1 + 5
+ Array1Par[IntLoc] = IntParI2
+ Array1Par[IntLoc+1] = Array1Par[IntLoc]
+ Array1Par[IntLoc+30] = IntLoc
+ for IntIndex in range(IntLoc, IntLoc+2):
+ Array2Par[IntLoc][IntIndex] = IntLoc
+ Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1
+ Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc]
+ IntGlob = 5
+
+def Func1(CharPar1, CharPar2):
+ CharLoc1 = CharPar1
+ CharLoc2 = CharLoc1
+ if CharLoc2 != CharPar2:
+ return Ident1
+ else:
+ return Ident2
+
+def Func2(StrParI1, StrParI2):
+ IntLoc = 1
+ while IntLoc <= 1:
+ if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
+ CharLoc = 'A'
+ IntLoc = IntLoc + 1
+ if CharLoc >= 'W' and CharLoc <= 'Z':
+ IntLoc = 7
+ if CharLoc == 'X':
+ return TRUE
+ else:
+ if StrParI1 > StrParI2:
+ IntLoc = IntLoc + 7
+ return TRUE
+ else:
+ return FALSE
+
+def Func3(EnumParIn):
+ EnumLoc = EnumParIn
+ if EnumLoc == Ident3: return TRUE
+ return FALSE
+
+if __name__ == '__main__':
+ import sys
+ def error(msg):
+ print(msg, end=' ', file=sys.stderr)
+ print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr)
+ sys.exit(100)
+ nargs = len(sys.argv) - 1
+ if nargs > 1:
+ error("%d arguments are too many;" % nargs)
+ elif nargs == 1:
+ try: loops = int(sys.argv[1])
+ except ValueError:
+ error("Invalid argument %r;" % sys.argv[1])
+ else:
+ loops = LOOPS
+ main(loops)
diff --git a/lib-python/3/test/test_macurl2path.py b/lib-python/3/test/test_macurl2path.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3/test/test_macurl2path.py
@@ -0,0 +1,31 @@
+import macurl2path
+import unittest
+
+class MacUrl2PathTestCase(unittest.TestCase):
+ def test_url2pathname(self):
+ self.assertEqual(":index.html", macurl2path.url2pathname("index.html"))
+ self.assertEqual(":bar:index.html",
macurl2path.url2pathname("bar/index.html"))
+ self.assertEqual("foo:bar:index.html",
macurl2path.url2pathname("/foo/bar/index.html"))
+ self.assertEqual("foo:bar", macurl2path.url2pathname("/foo/bar/"))
+ self.assertEqual("", macurl2path.url2pathname("/"))
+ self.assertRaises(RuntimeError, macurl2path.url2pathname,
"http://foo.com")
+ self.assertEqual("index.html",
macurl2path.url2pathname("///index.html"))
+ self.assertRaises(RuntimeError, macurl2path.url2pathname,
"//index.html")
+ self.assertEqual(":index.html",
macurl2path.url2pathname("./index.html"))
+ self.assertEqual(":index.html",
macurl2path.url2pathname("foo/../index.html"))
+ self.assertEqual("::index.html",
macurl2path.url2pathname("../index.html"))
+
+ def test_pathname2url(self):
+ self.assertEqual("drive", macurl2path.pathname2url("drive:"))
+ self.assertEqual("drive/dir", macurl2path.pathname2url("drive:dir:"))
+ self.assertEqual("drive/dir/file",
macurl2path.pathname2url("drive:dir:file"))
+ self.assertEqual("drive/file", macurl2path.pathname2url("drive:file"))
+ self.assertEqual("file", macurl2path.pathname2url("file"))
+ self.assertEqual("file", macurl2path.pathname2url(":file"))
+ self.assertEqual("dir", macurl2path.pathname2url(":dir:"))
+ self.assertEqual("dir/file", macurl2path.pathname2url(":dir:file"))
+ self.assertRaises(RuntimeError, macurl2path.pathname2url, "/")
+ self.assertEqual("dir/../file", macurl2path.pathname2url("dir::file"))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/pypy/interpreter/test/test_nestedscope.py b/pypy/interpreter/test/apptest_nestedscope.py
rename from pypy/interpreter/test/test_nestedscope.py
rename to pypy/interpreter/test/apptest_nestedscope.py
--- a/pypy/interpreter/test/test_nestedscope.py
+++ b/pypy/interpreter/test/apptest_nestedscope.py
@@ -1,171 +1,169 @@
+from pytest import raises
+def test_nested_scope():
+ x = 42
+ def f(): return x
+ assert f() == 42
-class AppTestNestedScope:
+def test_nested_scope2():
+ x = 42
+ y = 3
+ def f(): return x
+ assert f() == 42
- def test_nested_scope(self):
- x = 42
- def f(): return x
- assert f() == 42
+def test_nested_scope3():
+ x = 42
+ def f():
+ def g():
+ return x
+ return g
+ assert f()() == 42
- def test_nested_scope2(self):
- x = 42
- y = 3
- def f(): return x
- assert f() == 42
+def test_nested_scope4():
+ def f():
+ x = 3
+ def g():
+ return x
+ a = g()
+ x = 4
+ b = g()
+ return (a, b)
+ assert f() == (3, 4)
- def test_nested_scope3(self):
- x = 42
- def f():
- def g():
- return x
- return g
- assert f()() == 42
+def test_nested_scope_locals():
+ def f():
+ x = 3
+ def g():
+ i = x
+ return locals()
+ return g()
+ d = f()
+ assert d == {'i':3, 'x':3}
- def test_nested_scope4(self):
- def f():
- x = 3
- def g():
- return x
- a = g()
- x = 4
- b = g()
- return (a, b)
- assert f() == (3, 4)
-
- def test_nested_scope_locals(self):
- def f():
- x = 3
- def g():
+def test_deeply_nested_scope_locals():
+ def f():
+ x = 3
+ def g():
+ def h():
i = x
return locals()
- return g()
- d = f()
- assert d == {'i':3, 'x':3}
+ return locals(), h()
+ return g()
+ outer_locals, inner_locals = f()
+ assert inner_locals == {'i':3, 'x':3}
+ keys = sorted(outer_locals.keys())
+ assert keys == ['h', 'x']
- def test_deeply_nested_scope_locals(self):
- def f():
- x = 3
- def g():
- def h():
- i = x
- return locals()
- return locals(), h()
- return g()
- outer_locals, inner_locals = f()
- assert inner_locals == {'i':3, 'x':3}
- keys = sorted(outer_locals.keys())
- assert keys == ['h', 'x']
+def test_lambda_in_genexpr():
+ assert [x() for x in (lambda: x for x in range(10))] == list(range(10))
- def test_lambda_in_genexpr(self):
- assert [x() for x in (lambda: x for x in range(10))] == list(range(10))
+def test_cell_repr():
+ import re
+ from reprlib import repr as r # Don't shadow builtin repr
- def test_cell_repr(self):
- import re
- from reprlib import repr as r # Don't shadow builtin repr
+ def get_cell():
+ x = 42
+ def inner():
+ return x
+ return inner
+ x = get_cell().__closure__[0]
+ assert re.match(r'<cell at 0x[0-9A-Fa-f]+: int object at 0x[0-9A-Fa-f]+>', repr(x))
+ assert re.match(r'<cell at 0x.*\.\.\..*>', r(x))
- def get_cell():
+ def get_cell():
+ if False:
x = 42
- def inner():
+ def inner():
+ return x
+ return inner
+ x = get_cell().__closure__[0]
+ assert re.match(r'<cell at 0x[0-9A-Fa-f]+: empty>', repr(x))
+
+def test_cell_contents():
+ def f(x):
+ def f(y):
+ return x + y
+ return f
+
+ g = f(10)
+ assert g.__closure__[0].cell_contents == 10
+
+def test_empty_cell_contents():
+
+ def f():
+ def f(y):
+ return x + y
+ return f
+ x = 1
+
+ g = f()
+ with raises(ValueError):
+ g.__closure__[0].cell_contents
+
+def test_compare_cells():
+ def f(n):
+ if n:
+ x = n
+ def f(y):
+ return x + y
+ return f
+
+ empty_cell_1 = f(0).__closure__[0]
+ empty_cell_2 = f(0).__closure__[0]
+ g1 = f(1).__closure__[0]
+ g2 = f(2).__closure__[0]
+ assert g1 < g2
+ assert g1 <= g2
+ assert g2 > g1
+ assert g2 >= g1
+ assert not g1 == g2
+ assert g1 != g2
+ #
+ assert empty_cell_1 == empty_cell_2
+ assert not empty_cell_1 != empty_cell_2
+ assert empty_cell_1 < g1
+
+def test_leaking_class_locals():
+ def f(x):
+ class X:
+ x = 12
+ def f(self):
return x
- return inner
- x = get_cell().__closure__[0]
- assert re.match(r'<cell at 0x[0-9A-Fa-f]+: int object at 0x[0-9A-Fa-f]+>', repr(x))
- assert re.match(r'<cell at 0x.*\.\.\..*>', r(x))
+ locals()
+ return X
+ assert f(1).x == 12
- def get_cell():
- if False:
- x = 42
- def inner():
- return x
- return inner
- x = get_cell().__closure__[0]
- assert re.match(r'<cell at 0x[0-9A-Fa-f]+: empty>', repr(x))
+def test_nested_scope_locals_mutating_cellvars():
+ def f():
+ x = 12
+ def m():
+ locals()
+ x
+ locals()
+ return x
+ return m
+ assert f()() == 12
- def test_cell_contents(self):
- def f(x):
- def f(y):
- return x + y
- return f
- g = f(10)
- assert g.__closure__[0].cell_contents == 10
+def test_unbound_local_after_del():
+ """
+ # #4617: It is now legal to delete a cell variable.
+ # The following functions must obviously compile,
+ # and give the correct error when accessing the deleted name.
+ def errorInOuter():
+ y = 1
+ del y
+ print(y)
+ def inner():
+ return y
- def test_empty_cell_contents(self):
+ def errorInInner():
+ def inner():
+ return y
+ y = 1
+ del y
+ inner()
- def f():
- def f(y):
- return x + y
- return f
- x = 1
-
- g = f()
- with raises(ValueError):
- g.__closure__[0].cell_contents
-
- def test_compare_cells(self):
- def f(n):
- if n:
- x = n
- def f(y):
- return x + y
- return f
-
- empty_cell_1 = f(0).__closure__[0]
- empty_cell_2 = f(0).__closure__[0]
- g1 = f(1).__closure__[0]
- g2 = f(2).__closure__[0]
- assert g1 < g2
- assert g1 <= g2
- assert g2 > g1
- assert g2 >= g1
- assert not g1 == g2
- assert g1 != g2
- #
- assert empty_cell_1 == empty_cell_2
- assert not empty_cell_1 != empty_cell_2
- assert empty_cell_1 < g1
-
- def test_leaking_class_locals(self):
- def f(x):
- class X:
- x = 12
- def f(self):
- return x
- locals()
- return X
- assert f(1).x == 12
-
- def test_nested_scope_locals_mutating_cellvars(self):
- def f():
- x = 12
- def m():
- locals()
- x
- locals()
- return x
- return m
- assert f()() == 12
-
-
- def test_unbound_local_after_del(self):
- """
- # #4617: It is now legal to delete a cell variable.
- # The following functions must obviously compile,
- # and give the correct error when accessing the deleted name.
- def errorInOuter():
- y = 1
- del y
- print(y)
- def inner():
- return y
-
- def errorInInner():
- def inner():
- return y
- y = 1
- del y
- inner()
-
- raises(UnboundLocalError, "errorInOuter()")
- raises(NameError, "errorInInner()")
- """
+ raises(UnboundLocalError, "errorInOuter()")
+ raises(NameError, "errorInInner()")
+ """