[Python-checkins] [3.12] gh-115961: Improve tests for compressed file-like objects (GH-115963) (GH-116032)

2024-02-28 Thread serhiy-storchaka
https://github.com/python/cpython/commit/e5e98626a15dd62eccbb6a281ae0263f005f4942
commit: e5e98626a15dd62eccbb6a281ae0263f005f4942
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: serhiy-storchaka 
date: 2024-02-28T11:00:50+02:00
summary:

[3.12] gh-115961: Improve tests for compressed file-like objects (GH-115963) 
(GH-116032)

* Increase coverage for compressed file-like objects initialized with a
  file name, an open file object, a file object opened by file
  descriptor, and a file-like object without name and mode attributes
  (io.BytesIO)
* Increase coverage for name, fileno(), mode, readable(), writable(),
  seekable() in different modes and states
* No longer skip tests with bytes names
* Test objects implementing the path protocol, not just pathlib.Path.
(cherry picked from commit e72576c48b8be1e4f22c2f387f9769efa073c5be)

Co-authored-by: Serhiy Storchaka 

files:
M Lib/test/test_bz2.py
M Lib/test/test_gzip.py
M Lib/test/test_lzma.py
M Lib/test/test_tarfile.py
M Lib/test/test_zipfile/test_core.py
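
As a rough illustration of the behaviour these tests pin down (an illustrative
sketch, not taken from the patch; "demo.bz2" is a made-up file name):

    import bz2
    import io

    # Opened by file name: capabilities depend on the mode.
    with bz2.BZ2File("demo.bz2", "wb") as f:
        f.write(b"content")
        print(f.writable(), f.readable(), f.seekable())   # True False False
    print(f.closed)   # True; fileno()/readable()/writable()/seekable() now raise ValueError

    # A file-like object without name and mode attributes also works.
    with bz2.BZ2File(io.BytesIO(), "wb") as f:
        f.write(b"content")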

diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index 1f0b9adc3698b4..772f0eacce28f5 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -3,19 +3,19 @@
 
 import array
 import unittest
+import io
 from io import BytesIO, DEFAULT_BUFFER_SIZE
 import os
 import pickle
 import glob
 import tempfile
-import pathlib
 import random
 import shutil
 import subprocess
 import threading
 from test.support import import_helper
 from test.support import threading_helper
-from test.support.os_helper import unlink
+from test.support.os_helper import unlink, FakePath
 import _compression
 import sys
 
@@ -537,12 +537,136 @@ def testMultiStreamOrdering(self):
         with BZ2File(self.filename) as bz2f:
             self.assertEqual(bz2f.read(), data1 + data2)
 
+    def testOpenFilename(self):
+        with BZ2File(self.filename, "wb") as f:
+            f.write(b'content')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), False)
+            self.assertIs(f.writable(), True)
+            self.assertIs(f.seekable(), False)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        self.assertRaises(ValueError, f.fileno)
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+        with BZ2File(self.filename, "ab") as f:
+            f.write(b'appendix')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), False)
+            self.assertIs(f.writable(), True)
+            self.assertIs(f.seekable(), False)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        self.assertRaises(ValueError, f.fileno)
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+        with BZ2File(self.filename, 'rb') as f:
+            self.assertEqual(f.read(), b'contentappendix')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), True)
+            self.assertIs(f.writable(), False)
+            self.assertIs(f.seekable(), True)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        with self.assertRaises(ValueError):
+            f.fileno()
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+    def testOpenFileWithName(self):
+        with open(self.filename, 'wb') as raw:
+            with BZ2File(raw, 'wb') as f:
+                f.write(b'content')
+                self.assertEqual(f.fileno(), raw.fileno())
+                self.assertIs(f.readable(), False)
+                self.assertIs(f.writable(), True)
+                self.assertIs(f.seekable(), False)
+                self.assertIs(f.closed, False)
+            self.assertIs(f.closed, True)
+            self.assertRaises(ValueError, f.fileno)
+            self.assertRaises(ValueError, f.readable)
+            self.assertRaises(ValueError, f.writable)
+            self.assertRaises(ValueError, f.seekable)
+
+        with open(self.filename, 'ab') as raw:
+            with BZ2File(raw, 'ab') as f:
+                f.write(b'appendix')
+                self.assertEqual(f.fileno(), raw.fileno())
+                self.assertIs(f.readable(), False)
+                self.assertIs(f.writable(), True)
+                self.assertIs(f.seekable(), False)
+                self.assertIs(f.closed, False)
+            self.assertIs(f.closed, True)
+            self.assertRaises(ValueError, f.fileno)
+            self.assertRaises(ValueError, f.readable)
+            self.assertRaises(ValueError, f.writable)
+            self.assertRaises(ValueError, f.seekable)
+
+        with open(self.f

[Python-checkins] [3.11] gh-115961: Improve tests for compressed file-like objects (GH-115963) (GH-116039)

2024-02-28 Thread serhiy-storchaka
https://github.com/python/cpython/commit/becc61596f79a3bb2bfeb7134581ae46638d34dc
commit: becc61596f79a3bb2bfeb7134581ae46638d34dc
branch: 3.11
author: Serhiy Storchaka 
committer: serhiy-storchaka 
date: 2024-02-28T09:38:39Z
summary:

[3.11] gh-115961: Improve tests for compressed file-like objects (GH-115963) 
(GH-116039)

* Increase coverage for compressed file-like objects initialized with a
  file name, an open file object, a file object opened by file
  descriptor, and a file-like object without name and mode attributes
  (io.BytesIO)
* Increase coverage for name, fileno(), mode, readable(), writable(),
  seekable() in different modes and states
* No longer skip tests with bytes names
* Test objects implementing the path protocol, not just pathlib.Path.
(cherry picked from commit e72576c48b8be1e4f22c2f387f9769efa073c5be)

files:
M Lib/test/test_bz2.py
M Lib/test/test_gzip.py
M Lib/test/test_lzma.py
M Lib/test/test_tarfile.py
M Lib/test/test_zipfile.py

diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index ba1c02c833847c..16dee59d1c6b24 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -3,19 +3,19 @@
 
 import array
 import unittest
+import io
 from io import BytesIO, DEFAULT_BUFFER_SIZE
 import os
 import pickle
 import glob
 import tempfile
-import pathlib
 import random
 import shutil
 import subprocess
 import threading
 from test.support import import_helper
 from test.support import threading_helper
-from test.support.os_helper import unlink
+from test.support.os_helper import unlink, FakePath
 import _compression
 import sys
 
@@ -537,12 +537,136 @@ def testMultiStreamOrdering(self):
         with BZ2File(self.filename) as bz2f:
             self.assertEqual(bz2f.read(), data1 + data2)
 
+    def testOpenFilename(self):
+        with BZ2File(self.filename, "wb") as f:
+            f.write(b'content')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), False)
+            self.assertIs(f.writable(), True)
+            self.assertIs(f.seekable(), False)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        self.assertRaises(ValueError, f.fileno)
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+        with BZ2File(self.filename, "ab") as f:
+            f.write(b'appendix')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), False)
+            self.assertIs(f.writable(), True)
+            self.assertIs(f.seekable(), False)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        self.assertRaises(ValueError, f.fileno)
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+        with BZ2File(self.filename, 'rb') as f:
+            self.assertEqual(f.read(), b'contentappendix')
+            self.assertIsInstance(f.fileno(), int)
+            self.assertIs(f.readable(), True)
+            self.assertIs(f.writable(), False)
+            self.assertIs(f.seekable(), True)
+            self.assertIs(f.closed, False)
+        self.assertIs(f.closed, True)
+        with self.assertRaises(ValueError):
+            f.fileno()
+        self.assertRaises(ValueError, f.readable)
+        self.assertRaises(ValueError, f.writable)
+        self.assertRaises(ValueError, f.seekable)
+
+    def testOpenFileWithName(self):
+        with open(self.filename, 'wb') as raw:
+            with BZ2File(raw, 'wb') as f:
+                f.write(b'content')
+                self.assertEqual(f.fileno(), raw.fileno())
+                self.assertIs(f.readable(), False)
+                self.assertIs(f.writable(), True)
+                self.assertIs(f.seekable(), False)
+                self.assertIs(f.closed, False)
+            self.assertIs(f.closed, True)
+            self.assertRaises(ValueError, f.fileno)
+            self.assertRaises(ValueError, f.readable)
+            self.assertRaises(ValueError, f.writable)
+            self.assertRaises(ValueError, f.seekable)
+
+        with open(self.filename, 'ab') as raw:
+            with BZ2File(raw, 'ab') as f:
+                f.write(b'appendix')
+                self.assertEqual(f.fileno(), raw.fileno())
+                self.assertIs(f.readable(), False)
+                self.assertIs(f.writable(), True)
+                self.assertIs(f.seekable(), False)
+                self.assertIs(f.closed, False)
+            self.assertIs(f.closed, True)
+            self.assertRaises(ValueError, f.fileno)
+            self.assertRaises(ValueError, f.readable)
+            self.assertRaises(ValueError, f.writable)
+            self.assertRaises(ValueError, f.seekable)
+
+        with open(self.filename, 'rb') as raw:
+            with BZ2File(raw, 'rb') as f:
+                self.assertEqual(f.re

[Python-checkins] gh-105858: Expose some union-related objects as internal APIs (GH-116025)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/d53560deb2c9ae12147201003fe63b266654ee21
commit: d53560deb2c9ae12147201003fe63b266654ee21
branch: main
author: Jelle Zijlstra 
committer: encukou 
date: 2024-02-28T09:56:40Z
summary:

gh-105858: Expose some union-related objects as internal APIs (GH-116025)

We now use these in the AST parsing code after gh-105880. A few comparable
types (e.g., NoneType) are already exposed as internal APIs.

files:
M Include/internal/pycore_unionobject.h

diff --git a/Include/internal/pycore_unionobject.h 
b/Include/internal/pycore_unionobject.h
index 87264635b6e1cf..6ece7134cdeca0 100644
--- a/Include/internal/pycore_unionobject.h
+++ b/Include/internal/pycore_unionobject.h
@@ -8,9 +8,11 @@ extern "C" {
 #  error "this header requires Py_BUILD_CORE define"
 #endif
 
-extern PyTypeObject _PyUnion_Type;
+// For extensions created by test_peg_generator
+PyAPI_DATA(PyTypeObject) _PyUnion_Type;
+PyAPI_FUNC(PyObject *) _Py_union_type_or(PyObject *, PyObject *);
+
 #define _PyUnion_Check(op) Py_IS_TYPE((op), &_PyUnion_Type)
-extern PyObject *_Py_union_type_or(PyObject *, PyObject *);
 
 #define _PyGenericAlias_Check(op) PyObject_TypeCheck((op), 
&Py_GenericAliasType)
 extern PyObject *_Py_subs_parameters(PyObject *, PyObject *, PyObject *, 
PyObject *);



[Python-checkins] gh-115773: Add tests to exercise the _Py_DebugOffsets structure (#115774)

2024-02-28 Thread pablogsal
https://github.com/python/cpython/commit/1752b51012269eaa35f7a28f162d18479a4f72aa
commit: 1752b51012269eaa35f7a28f162d18479a4f72aa
branch: main
author: Pablo Galindo Salgado 
committer: pablogsal 
date: 2024-02-28T10:17:34Z
summary:

gh-115773: Add tests to exercise the _Py_DebugOffsets structure (#115774)

files:
A Lib/test/test_external_inspection.py
A Modules/_testexternalinspection.c
M Include/internal/pycore_runtime.h
M Include/internal/pycore_runtime_init.h
M Modules/Setup
M Modules/Setup.stdlib.in
M Tools/build/generate_stdlib_module_names.py
M configure
M configure.ac
M pyconfig.h.in

diff --git a/Include/internal/pycore_runtime.h 
b/Include/internal/pycore_runtime.h
index 0c9c59e85b2fcf..dc6f6f100f7a92 100644
--- a/Include/internal/pycore_runtime.h
+++ b/Include/internal/pycore_runtime.h
@@ -55,74 +55,81 @@ typedef struct _Py_DebugOffsets {
 uint64_t version;
 // Runtime state offset;
 struct _runtime_state {
-off_t finalizing;
-off_t interpreters_head;
+uint64_t finalizing;
+uint64_t interpreters_head;
 } runtime_state;
 
 // Interpreter state offset;
 struct _interpreter_state {
-off_t next;
-off_t threads_head;
-off_t gc;
-off_t imports_modules;
-off_t sysdict;
-off_t builtins;
-off_t ceval_gil;
-off_t gil_runtime_state_locked;
-off_t gil_runtime_state_holder;
+uint64_t next;
+uint64_t threads_head;
+uint64_t gc;
+uint64_t imports_modules;
+uint64_t sysdict;
+uint64_t builtins;
+uint64_t ceval_gil;
+uint64_t gil_runtime_state_locked;
+uint64_t gil_runtime_state_holder;
 } interpreter_state;
 
 // Thread state offset;
 struct _thread_state{
-off_t prev;
-off_t next;
-off_t interp;
-off_t current_frame;
-off_t thread_id;
-off_t native_thread_id;
+uint64_t prev;
+uint64_t next;
+uint64_t interp;
+uint64_t current_frame;
+uint64_t thread_id;
+uint64_t native_thread_id;
 } thread_state;
 
 // InterpreterFrame offset;
 struct _interpreter_frame {
-off_t previous;
-off_t executable;
-off_t instr_ptr;
-off_t localsplus;
-off_t owner;
+uint64_t previous;
+uint64_t executable;
+uint64_t instr_ptr;
+uint64_t localsplus;
+uint64_t owner;
 } interpreter_frame;
 
 // CFrame offset;
 struct _cframe {
-off_t current_frame;
-off_t previous;
+uint64_t current_frame;
+uint64_t previous;
 } cframe;
 
 // Code object offset;
 struct _code_object {
-off_t filename;
-off_t name;
-off_t linetable;
-off_t firstlineno;
-off_t argcount;
-off_t localsplusnames;
-off_t localspluskinds;
-off_t co_code_adaptive;
+uint64_t filename;
+uint64_t name;
+uint64_t linetable;
+uint64_t firstlineno;
+uint64_t argcount;
+uint64_t localsplusnames;
+uint64_t localspluskinds;
+uint64_t co_code_adaptive;
 } code_object;
 
 // PyObject offset;
 struct _pyobject {
-off_t ob_type;
+uint64_t ob_type;
 } pyobject;
 
 // PyTypeObject object offset;
 struct _type_object {
-off_t tp_name;
+uint64_t tp_name;
 } type_object;
 
 // PyTuple object offset;
 struct _tuple_object {
-off_t ob_item;
+uint64_t ob_item;
 } tuple_object;
+
+// Unicode object offset;
+struct _unicode_object {
+uint64_t state;
+uint64_t length;
+size_t asciiobject_size;
+} unicode_object;
 } _Py_DebugOffsets;
 
 /* Full Python runtime state */
diff --git a/Include/internal/pycore_runtime_init.h 
b/Include/internal/pycore_runtime_init.h
index d093047d4bc09d..cc47b9a82e2879 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -83,6 +83,11 @@ extern PyTypeObject _PyExc_MemoryError;
 .tuple_object = { \
 .ob_item = offsetof(PyTupleObject, ob_item), \
 }, \
+.unicode_object = { \
+.state = offsetof(PyUnicodeObject, _base._base.state), \
+.length = offsetof(PyUnicodeObject, _base._base.length), \
+.asciiobject_size = sizeof(PyASCIIObject), \
+}, \
 }, \
 .allocators = { \
 .standard = _pymem_allocators_standard_INIT(runtime), \
diff --git a/Lib/test/test_external_inspection.py 
b/Lib/test/test_external_inspection.py
new file mode 100644
index 00..86c07de507e39c
--- /dev/null
+++ b/Lib/test/test_external_inspection.py
@@ -0,0 +1,84 @@
+import unittest
+import os
+import textwrap
+import importlib
+import sys
+from test.support import os_helper, SHORT_TIMEOUT
+from test.support.script_helper imp

[Python-checkins] gh-116030: test_unparse: Add ``ctx`` argument to ``ast.Name`` calls (#116031)

2024-02-28 Thread JelleZijlstra
https://github.com/python/cpython/commit/3b63d0769f49171f53e9cecc686fa01a383bd4b1
commit: 3b63d0769f49171f53e9cecc686fa01a383bd4b1
branch: main
author: Kirill Podoprigora 
committer: JelleZijlstra 
date: 2024-02-28T03:04:23-08:00
summary:

gh-116030: test_unparse: Add ``ctx`` argument to ``ast.Name`` calls (#116031)

files:
M Lib/test/test_unparse.py
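
For context, the updated tests now build Name nodes with an explicit expression
context; a minimal sketch of that pattern (illustrative, not taken from the patch):

    import ast

    # A hand-built Name node carries an explicit Load context.
    node = ast.Name(id="x", ctx=ast.Load())
    print(ast.unparse(node))          # x

    # The same node can be embedded in a larger tree, e.g. a raise statement.
    raise_node = ast.Raise(exc=None, cause=ast.Name(id="X", ctx=ast.Load()))
    print(ast.dump(raise_node))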

diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py
index 106704ba8c9c2d..bb15f64c59dbd1 100644
--- a/Lib/test/test_unparse.py
+++ b/Lib/test/test_unparse.py
@@ -370,13 +370,13 @@ def test_slices(self):
         self.check_ast_roundtrip("a[i:j, k]")
 
     def test_invalid_raise(self):
-        self.check_invalid(ast.Raise(exc=None, cause=ast.Name(id="X")))
+        self.check_invalid(ast.Raise(exc=None, cause=ast.Name(id="X", ctx=ast.Load())))
 
     def test_invalid_fstring_value(self):
         self.check_invalid(
             ast.JoinedStr(
                 values=[
-                    ast.Name(id="test"),
+                    ast.Name(id="test", ctx=ast.Load()),
                     ast.Constant(value="test")
                 ]
             )
@@ -718,7 +718,7 @@ def test_function_with_type_params_and_bound(self):
             body=[ast.Pass()],
             decorator_list=[],
             returns=None,
-            type_params=[ast.TypeVar("T", bound=ast.Name("int"))],
+            type_params=[ast.TypeVar("T", bound=ast.Name("int", ctx=ast.Load()))],
         )
         ast.fix_missing_locations(node)
         self.assertEqual(ast.unparse(node), "def f[T: int]():\n    pass")



[Python-checkins] gh-114911: Add CPUStopwatch test helper (GH-114912)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/7acf1fb5a70776429bd99e741d69471eb2d1c1bb
commit: 7acf1fb5a70776429bd99e741d69471eb2d1c1bb
branch: main
author: Petr Viktorin 
committer: encukou 
date: 2024-02-28T12:53:48+01:00
summary:

gh-114911: Add CPUStopwatch test helper (GH-114912)

A few of our tests measure the time of a CPU-bound operation, mainly
to avoid quadratic or worse behaviour.
Add a helper to ignore GC and time spent in other processes.

files:
M Lib/test/support/__init__.py
M Lib/test/test_int.py
M Lib/test/test_re.py
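
Roughly how the new helper is meant to be used (a sketch based on the diff
below; test.support is an internal test helper, not a public API, and the
timed workload here is arbitrary):

    from test import support

    with support.CPUStopwatch() as sw:
        sum(i * i for i in range(10**6))   # some CPU-bound work

    print(sw.seconds)                  # CPU time where available, otherwise wall-clock time
    print(sw.clock_info.resolution)    # resolution of the clock that was used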

diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 1d03ec0f5bd12b..401b2ce1fe213c 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -2381,6 +2381,46 @@ def sleeping_retry(timeout, err_msg=None, /,
         delay = min(delay * 2, max_delay)
 
 
+class CPUStopwatch:
+    """Context manager to roughly time a CPU-bound operation.
+
+    Disables GC. Uses CPU time if it can (i.e. excludes sleeps & time of
+    other processes).
+
+    N.B.:
+    - This *includes* time spent in other threads.
+    - Some systems only have a coarse resolution; check
+      stopwatch.clock_info.resolution if it matters.
+
+    Usage:
+
+    with CPUStopwatch() as stopwatch:
+        ...
+    elapsed = stopwatch.seconds
+    resolution = stopwatch.clock_info.resolution
+    """
+    def __enter__(self):
+        get_time = time.process_time
+        clock_info = time.get_clock_info('process_time')
+        if get_time() <= 0:  # some platforms like WASM lack process_time()
+            get_time = time.monotonic
+            clock_info = time.get_clock_info('monotonic')
+        self.context = disable_gc()
+        self.context.__enter__()
+        self.get_time = get_time
+        self.clock_info = clock_info
+        self.start_time = get_time()
+        return self
+
+    def __exit__(self, *exc):
+        try:
+            end_time = self.get_time()
+        finally:
+            result = self.context.__exit__(*exc)
+        self.seconds = end_time - self.start_time
+        return result
+
+
 @contextlib.contextmanager
 def adjust_int_max_str_digits(max_digits):
     """Temporarily change the integer string conversion length limit."""
diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py
index 0bf55facad9fed..47fc50a0e20349 100644
--- a/Lib/test/test_int.py
+++ b/Lib/test/test_int.py
@@ -664,84 +664,78 @@ def test_denial_of_service_prevented_int_to_str(self):
 """Regression test: ensure we fail before performing O(N**2) work."""
 maxdigits = sys.get_int_max_str_digits()
 assert maxdigits < 50_000, maxdigits  # A test prerequisite.
-get_time = time.process_time
-if get_time() <= 0:  # some platforms like WASM lack process_time()
-get_time = time.monotonic
 
 huge_int = int(f'0x{"c"*65_000}', base=16)  # 78268 decimal digits.
 digits = 78_268
-with support.adjust_int_max_str_digits(digits):
-start = get_time()
+with (
+support.adjust_int_max_str_digits(digits),
+support.CPUStopwatch() as sw_convert):
 huge_decimal = str(huge_int)
-seconds_to_convert = get_time() - start
 self.assertEqual(len(huge_decimal), digits)
 # Ensuring that we chose a slow enough conversion to measure.
 # It takes 0.1 seconds on a Zen based cloud VM in an opt build.
 # Some OSes have a low res 1/64s timer, skip if hard to measure.
-if seconds_to_convert < 1/64:
+if sw_convert.seconds < sw_convert.clock_info.resolution * 2:
 raise unittest.SkipTest('"slow" conversion took only '
-f'{seconds_to_convert} seconds.')
+f'{sw_convert.seconds} seconds.')
 
 # We test with the limit almost at the size needed to check 
performance.
 # The performant limit check is slightly fuzzy, give it a some room.
 with support.adjust_int_max_str_digits(int(.995 * digits)):
-with self.assertRaises(ValueError) as err:
-start = get_time()
+with (
+self.assertRaises(ValueError) as err,
+support.CPUStopwatch() as sw_fail_huge):
 str(huge_int)
-seconds_to_fail_huge = get_time() - start
 self.assertIn('conversion', str(err.exception))
-self.assertLessEqual(seconds_to_fail_huge, seconds_to_convert/2)
+self.assertLessEqual(sw_fail_huge.seconds, sw_convert.seconds/2)
 
 # Now we test that a conversion that would take 30x as long also fails
 # in a similarly fast fashion.
 extra_huge_int = int(f'0x{"c"*500_000}', base=16)  # 602060 digits.
-with self.assertRaises(ValueError) as err:
-start = get_time()
+with (
+self.assertRaises(ValueError) as err,
+support.CPUStopwatch() as sw_fail_extra_huge):
   

[Python-checkins] gh-78612: Mark up eval() using param list (#115212)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/a71e32ce8e183023fc1ee401c22ebe35e4832f09
commit: a71e32ce8e183023fc1ee401c22ebe35e4832f09
branch: main
author: Erlend E. Aasland 
committer: erlend-aasland 
date: 2024-02-28T14:03:50+01:00
summary:

gh-78612: Mark up eval() using param list (#115212)

Also mention that the 'expression' parameter can be a string.

files:
M Doc/library/functions.rst
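
As a quick illustration of the parameters the new list documents (standard
eval() behaviour, not something changed by this commit):

    # globals must be a dict; locals can be any mapping.
    print(eval("x + y", {"x": 1}, {"y": 2}))        # 3

    # The first argument may also be a code object.
    code = compile("x * 10", "<demo>", "eval")
    print(eval(code, {"x": 4}))                     # 40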

diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index a4852b922b65b3..e598ef423de497 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -526,9 +526,20 @@ are always available.  They are listed here in 
alphabetical order.
 
 .. function:: eval(expression, globals=None, locals=None)
 
-   The arguments are a string and optional globals and locals.  If provided,
-   *globals* must be a dictionary.  If provided, *locals* can be any mapping
-   object.
+   :param expression:
+  A Python expression.
+   :type expression: :class:`str` | :ref:`code object <code-objects>`
+
+   :param globals:
+  The global namespace (default: ``None``).
+   :type globals: :class:`dict` | ``None``
+
+   :param locals:
+  The local namespace (default: ``None``).
+   :type locals: :term:`mapping` | ``None``
+
+   :returns: The result of the evaluated expression.
+   :raises: Syntax errors are reported as exceptions.
 
The *expression* argument is parsed and evaluated as a Python expression
(technically speaking, a condition list) using the *globals* and *locals*
@@ -545,8 +556,7 @@ are always available.  They are listed here in alphabetical 
order.
    :term:`nested scopes <nested scope>` (non-locals) in the enclosing
environment.
 
-   The return value is the result of
-   the evaluated expression. Syntax errors are reported as exceptions.  
Example:
+   Example:
 
   >>> x = 1
   >>> eval('x+1')



[Python-checkins] gh-115765: Don't use deprecated AC_EGREP_* macros in configure.ac (#116016)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/449c6da2bdc5c6aa5e096aa550a4ba377b85db46
commit: 449c6da2bdc5c6aa5e096aa550a4ba377b85db46
branch: main
author: Erlend E. Aasland 
committer: erlend-aasland 
date: 2024-02-28T14:35:41+01:00
summary:

gh-115765: Don't use deprecated AC_EGREP_* macros in configure.ac (#116016)

Rewrite using AX_CHECK_DEFINE and AC_CHECK_TYPES.

files:
M aclocal.m4
M configure
M configure.ac
M pyconfig.h.in

diff --git a/aclocal.m4 b/aclocal.m4
index 09ae5d1aa8a608..832aec19f48f17 100644
--- a/aclocal.m4
+++ b/aclocal.m4
@@ -150,6 +150,80 @@ AS_VAR_IF(CACHEVAR,yes,
 AS_VAR_POPDEF([CACHEVAR])dnl
 ])dnl AX_CHECK_COMPILE_FLAGS
 
+# ===
+# https://www.gnu.org/software/autoconf-archive/ax_check_define.html
+# ===
+#
+# SYNOPSIS
+#
+#   AC_CHECK_DEFINE([symbol], [ACTION-IF-FOUND], [ACTION-IF-NOT])
+#   AX_CHECK_DEFINE([includes],[symbol], [ACTION-IF-FOUND], [ACTION-IF-NOT])
+#
+# DESCRIPTION
+#
+#   Complements AC_CHECK_FUNC but it does not check for a function but for a
+#   define to exist. Consider a usage like:
+#
+#AC_CHECK_DEFINE(__STRICT_ANSI__, CFLAGS="$CFLAGS -D_XOPEN_SOURCE=500")
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Guido U. Draheim 
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved.  This file is offered as-is, without any
+#   warranty.
+
+#serial 11
+
+AU_ALIAS([AC_CHECK_DEFINED], [AC_CHECK_DEFINE])
+AC_DEFUN([AC_CHECK_DEFINE],[
+AS_VAR_PUSHDEF([ac_var],[ac_cv_defined_$1])dnl
+AC_CACHE_CHECK([for $1 defined], ac_var,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[
+  #ifdef $1
+  int ok;
+  (void)ok;
+  #else
+  choke me
+  #endif
+]])],[AS_VAR_SET(ac_var, yes)],[AS_VAR_SET(ac_var, no)]))
+AS_IF([test AS_VAR_GET(ac_var) != "no"], [$2], [$3])dnl
+AS_VAR_POPDEF([ac_var])dnl
+])
+
+AU_ALIAS([AX_CHECK_DEFINED], [AX_CHECK_DEFINE])
+AC_DEFUN([AX_CHECK_DEFINE],[
+AS_VAR_PUSHDEF([ac_var],[ac_cv_defined_$2_$1])dnl
+AC_CACHE_CHECK([for $2 defined in $1], ac_var,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <$1>]], [[
+  #ifdef $2
+  int ok;
+  (void)ok;
+  #else
+  choke me
+  #endif
+]])],[AS_VAR_SET(ac_var, yes)],[AS_VAR_SET(ac_var, no)]))
+AS_IF([test AS_VAR_GET(ac_var) != "no"], [$3], [$4])dnl
+AS_VAR_POPDEF([ac_var])dnl
+])
+
+AC_DEFUN([AX_CHECK_FUNC],
+[AS_VAR_PUSHDEF([ac_var], [ac_cv_func_$2])dnl
+AC_CACHE_CHECK([for $2], ac_var,
+dnl AC_LANG_FUNC_LINK_TRY
+[AC_LINK_IFELSE([AC_LANG_PROGRAM([$1
+#undef $2
+char $2 ();],[
+char (*f) () = $2;
+return f != $2; ])],
+[AS_VAR_SET(ac_var, yes)],
+[AS_VAR_SET(ac_var, no)])])
+AS_IF([test AS_VAR_GET(ac_var) = yes], [$3], [$4])dnl
+AS_VAR_POPDEF([ac_var])dnl
+])# AC_CHECK_FUNC
+
 # ===
 # https://www.gnu.org/software/autoconf-archive/ax_check_openssl.html
 # ===
diff --git a/configure b/configure
index c204c9eb499559..f431c5dd15ec4a 100755
--- a/configure
+++ b/configure
@@ -11297,42 +11297,22 @@ then :
 fi
 
 
-# checks for typedefs
-
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for clock_t in time.h" 
>&5
-printf %s "checking for clock_t in time.h... " >&6; }
-if test ${ac_cv_clock_t_time_h+y}
-then :
-  printf %s "(cached) " >&6
-else $as_nop
-
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <time.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-  $EGREP "clock_t" >/dev/null 2>&1
+# Check for clock_t in time.h.
+ac_fn_c_check_type "$LINENO" "clock_t" "ac_cv_type_clock_t" "#include <time.h>
+"
+if test "x$ac_cv_type_clock_t" = xyes
 then :
-  ac_cv_clock_t_time_h=yes
-else $as_nop
-  ac_cv_clock_t_time_h=no
-fi
-rm -rf conftest*
 
+printf "%s\n" "#define HAVE_CLOCK_T 1" >>confdefs.h
 
-fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_clock_t_time_h" 
>&5
-printf "%s\n" "$ac_cv_clock_t_time_h" >&6; }
-if test "x$ac_cv_clock_t_time_h" = xno
-then :
 
+else $as_nop
 
 printf "%s\n" "#define clock_t long" >>confdefs.h
 
-
 fi
 
+
 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for makedev" >&5
 printf %s "checking for makedev... " >&6; }
 if test ${ac_cv_func_makedev+y}
@@ -11534,6 +11514,7 @@ printf "%s\n" "#define size_t unsigned int" >>confdefs.h
 
 fi
 
+
 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for uid_t in 
sys/types.h" >&5
 printf %s "checking for uid_t in sys/types.h... " >&6; }
 if test ${ac_cv_type_uid_t+y}
@@ -16184,24 +16165,47 @@ else
 # (e.g. gnu pth with pthread emulation)
 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for _POSIX_THREADS 
in unistd.h" >&5
 printf %s "checking for _POSIX_THREADS in unistd.h..

[Python-checkins] doc: Use super() in subclassed JSONEncoder examples (GH-115565)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/647053fed182066d3b8c934fb0bf52ee48ff3911
commit: 647053fed182066d3b8c934fb0bf52ee48ff3911
branch: main
author: Jan Max Meyer 
committer: encukou 
date: 2024-02-28T14:54:12+01:00
summary:

doc: Use super() in subclassed JSONEncoder examples (GH-115565)

Replace calls to `json.JSONEncoder.default(self, obj)`
by `super().default(obj)` within the examples of the documentation.

files:
M Doc/library/json.rst
M Lib/json/encoder.py
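
The same pattern applies to any JSONEncoder subclass; for instance, a
hypothetical encoder for datetime values (illustrative only, not part of
the patch):

    import json
    from datetime import datetime

    class DateTimeEncoder(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, datetime):
                return o.isoformat()
            # Let the base class default method raise the TypeError
            return super().default(o)

    print(json.dumps({"now": datetime(2024, 2, 28, 12, 0)}, cls=DateTimeEncoder))
    # {"now": "2024-02-28T12:00:00"}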

diff --git a/Doc/library/json.rst b/Doc/library/json.rst
index 0ce4b697145cb3..c82ff9dc325b4c 100644
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -106,7 +106,7 @@ Extending :class:`JSONEncoder`::
 ... if isinstance(obj, complex):
 ... return [obj.real, obj.imag]
 ... # Let the base class default method raise the TypeError
-... return json.JSONEncoder.default(self, obj)
+... return super().default(obj)
 ...
 >>> json.dumps(2 + 1j, cls=ComplexEncoder)
 '[2.0, 1.0]'
@@ -504,7 +504,7 @@ Encoders and Decoders
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return json.JSONEncoder.default(self, o)
+return super().default(o)
 
 
.. method:: encode(o)
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 45f547741885a8..597849eca0524a 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -174,7 +174,7 @@ def default(self, o):
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return JSONEncoder.default(self, o)
+return super().default(o)
 
 """
 raise TypeError(f'Object of type {o.__class__.__name__} '



[Python-checkins] gh-116012: Preserve GetLastError() across calls to TlsGetValue on Windows (GH-116014)

2024-02-28 Thread zooba
https://github.com/python/cpython/commit/9578288a3e5a7f42d1f3bec139c0c85b87775c90
commit: 9578288a3e5a7f42d1f3bec139c0c85b87775c90
branch: main
author: Steve Dower 
committer: zooba 
date: 2024-02-28T13:58:25Z
summary:

gh-116012: Preserve GetLastError() across calls to TlsGetValue on Windows 
(GH-116014)

files:
A Misc/NEWS.d/next/Windows/2024-02-27-23-21-55.gh-issue-116012.B9_IwM.rst
M Python/pystate.c
M Python/thread_nt.h

diff --git 
a/Misc/NEWS.d/next/Windows/2024-02-27-23-21-55.gh-issue-116012.B9_IwM.rst 
b/Misc/NEWS.d/next/Windows/2024-02-27-23-21-55.gh-issue-116012.B9_IwM.rst
new file mode 100644
index 00..a55e5b1c7b566d
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2024-02-27-23-21-55.gh-issue-116012.B9_IwM.rst
@@ -0,0 +1 @@
+Ensure the value of ``GetLastError()`` is preserved across GIL operations.
diff --git a/Python/pystate.c b/Python/pystate.c
index a80c1b7fb9c866..a370fff857af85 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -2528,16 +2528,7 @@ PyGILState_Check(void)
 return 0;
 }
 
-#ifdef MS_WINDOWS
-int err = GetLastError();
-#endif
-
 PyThreadState *tcur = gilstate_tss_get(runtime);
-
-#ifdef MS_WINDOWS
-SetLastError(err);
-#endif
-
 return (tstate == tcur);
 }
 
diff --git a/Python/thread_nt.h b/Python/thread_nt.h
index 7922b2d7e84845..9dca833ff203ca 100644
--- a/Python/thread_nt.h
+++ b/Python/thread_nt.h
@@ -513,5 +513,10 @@ void *
 PyThread_tss_get(Py_tss_t *key)
 {
 assert(key != NULL);
-return TlsGetValue(key->_key);
+int err = GetLastError();
+void *r = TlsGetValue(key->_key);
+if (r || !GetLastError()) {
+SetLastError(err);
+}
+return r;
 }



[Python-checkins] [3.11] doc: Use super() in subclassed JSONEncoder examples (GH-115565) (GH-116046)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/f2257402113b63c7d22af929d68918cae249402b
commit: f2257402113b63c7d22af929d68918cae249402b
branch: 3.11
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: encukou 
date: 2024-02-28T14:08:40Z
summary:

[3.11] doc: Use super() in subclassed JSONEncoder examples (GH-115565) 
(GH-116046)

doc: Use super() in subclassed JSONEncoder examples (GH-115565)

Replace calls to `json.JSONEncoder.default(self, obj)`
by `super().default(obj)` within the examples of the documentation.
(cherry picked from commit 647053fed182066d3b8c934fb0bf52ee48ff3911)

Co-authored-by: Jan Max Meyer 

files:
M Doc/library/json.rst
M Lib/json/encoder.py

diff --git a/Doc/library/json.rst b/Doc/library/json.rst
index e234fe92bc9995..226d1c3dbfcf63 100644
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -95,7 +95,7 @@ Extending :class:`JSONEncoder`::
 ... if isinstance(obj, complex):
 ... return [obj.real, obj.imag]
 ... # Let the base class default method raise the TypeError
-... return json.JSONEncoder.default(self, obj)
+... return super().default(obj)
 ...
 >>> json.dumps(2 + 1j, cls=ComplexEncoder)
 '[2.0, 1.0]'
@@ -493,7 +493,7 @@ Encoders and Decoders
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return json.JSONEncoder.default(self, o)
+return super().default(o)
 
 
.. method:: encode(o)
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 45f547741885a8..597849eca0524a 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -174,7 +174,7 @@ def default(self, o):
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return JSONEncoder.default(self, o)
+return super().default(o)
 
 """
 raise TypeError(f'Object of type {o.__class__.__name__} '



[Python-checkins] [3.12] doc: Use super() in subclassed JSONEncoder examples (GH-115565) (GH-116047)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/9df6d1e03338d1d7bc12e0c5e9e2559e2a3ab706
commit: 9df6d1e03338d1d7bc12e0c5e9e2559e2a3ab706
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: encukou 
date: 2024-02-28T14:13:08Z
summary:

[3.12] doc: Use super() in subclassed JSONEncoder examples (GH-115565) 
(GH-116047)

doc: Use super() in subclassed JSONEncoder examples (GH-115565)

Replace calls to `json.JSONEncoder.default(self, obj)`
by `super().default(obj)` within the examples of the documentation.
(cherry picked from commit 647053fed182066d3b8c934fb0bf52ee48ff3911)

Co-authored-by: Jan Max Meyer 

files:
M Doc/library/json.rst
M Lib/json/encoder.py

diff --git a/Doc/library/json.rst b/Doc/library/json.rst
index e234fe92bc9995..226d1c3dbfcf63 100644
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -95,7 +95,7 @@ Extending :class:`JSONEncoder`::
 ... if isinstance(obj, complex):
 ... return [obj.real, obj.imag]
 ... # Let the base class default method raise the TypeError
-... return json.JSONEncoder.default(self, obj)
+... return super().default(obj)
 ...
 >>> json.dumps(2 + 1j, cls=ComplexEncoder)
 '[2.0, 1.0]'
@@ -493,7 +493,7 @@ Encoders and Decoders
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return json.JSONEncoder.default(self, o)
+return super().default(o)
 
 
.. method:: encode(o)
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 45f547741885a8..597849eca0524a 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -174,7 +174,7 @@ def default(self, o):
 else:
 return list(iterable)
 # Let the base class default method raise the TypeError
-return JSONEncoder.default(self, o)
+return super().default(o)
 
 """
 raise TypeError(f'Object of type {o.__class__.__name__} '



[Python-checkins] gh-107674: Improve performance of `sys.settrace` (GH-114986)

2024-02-28 Thread markshannon
https://github.com/python/cpython/commit/0a61e237009bf6b833e13ac635299ee063377699
commit: 0a61e237009bf6b833e13ac635299ee063377699
branch: main
author: Tian Gao 
committer: markshannon 
date: 2024-02-28T15:21:42Z
summary:

gh-107674: Improve performance of `sys.settrace` (GH-114986)

files:
A Misc/NEWS.d/next/Core and 
Builtins/2024-02-04-07-45-29.gh-issue-107674.q8mCmi.rst
M Python/bytecodes.c
M Python/ceval.c
M Python/ceval_macros.h
M Python/executor_cases.c.h
M Python/generated_cases.c.h
M Python/instrumentation.c
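
For reference, the kind of tracing this change affects (plain sys.settrace
usage; the speed-up itself is internal to the interpreter):

    import sys

    def tracer(frame, event, arg):
        if event == "line":
            print(f"{frame.f_code.co_name}:{frame.f_lineno}")
        return tracer          # keep receiving events for this frame

    def work():
        total = 0
        for i in range(3):
            total += i
        return total

    sys.settrace(tracer)
    work()
    sys.settrace(None)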

diff --git a/Misc/NEWS.d/next/Core and 
Builtins/2024-02-04-07-45-29.gh-issue-107674.q8mCmi.rst b/Misc/NEWS.d/next/Core 
and Builtins/2024-02-04-07-45-29.gh-issue-107674.q8mCmi.rst
new file mode 100644
index 00..f9b96788bfad94
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and 
Builtins/2024-02-04-07-45-29.gh-issue-107674.q8mCmi.rst 
@@ -0,0 +1 @@
+Improved the performance of :func:`sys.settrace` significantly
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index e9e9425f826a2d..565379afc4b5a7 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -143,22 +143,23 @@ dummy_func(
 
 tier1 inst(RESUME, (--)) {
 assert(frame == tstate->current_frame);
-uintptr_t global_version =
-_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) &
-~_PY_EVAL_EVENTS_MASK;
-uintptr_t code_version = 
_PyFrame_GetCode(frame)->_co_instrumentation_version;
-assert((code_version & 255) == 0);
-if (code_version != global_version) {
-int err = _Py_Instrument(_PyFrame_GetCode(frame), 
tstate->interp);
-ERROR_IF(err, error);
-next_instr = this_instr;
-}
-else {
-if ((oparg & RESUME_OPARG_LOCATION_MASK) < 
RESUME_AFTER_YIELD_FROM) {
-CHECK_EVAL_BREAKER();
+if (tstate->tracing == 0) {
+uintptr_t global_version =
+_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) &
+~_PY_EVAL_EVENTS_MASK;
+uintptr_t code_version = 
_PyFrame_GetCode(frame)->_co_instrumentation_version;
+assert((code_version & 255) == 0);
+if (code_version != global_version) {
+int err = _Py_Instrument(_PyFrame_GetCode(frame), 
tstate->interp);
+ERROR_IF(err, error);
+next_instr = this_instr;
+DISPATCH();
 }
-this_instr->op.code = RESUME_CHECK;
 }
+if ((oparg & RESUME_OPARG_LOCATION_MASK) < 
RESUME_AFTER_YIELD_FROM) {
+CHECK_EVAL_BREAKER();
+}
+this_instr->op.code = RESUME_CHECK;
 }
 
 inst(RESUME_CHECK, (--)) {
@@ -169,13 +170,13 @@ dummy_func(
 uintptr_t eval_breaker = 
_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker);
 uintptr_t version = 
_PyFrame_GetCode(frame)->_co_instrumentation_version;
 assert((version & _PY_EVAL_EVENTS_MASK) == 0);
-DEOPT_IF(eval_breaker != version);
+DEOPT_IF(eval_breaker != version && tstate->tracing == 0);
 }
 
 inst(INSTRUMENTED_RESUME, (--)) {
 uintptr_t global_version = 
_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & ~_PY_EVAL_EVENTS_MASK;
 uintptr_t code_version = 
_PyFrame_GetCode(frame)->_co_instrumentation_version;
-if (code_version != global_version) {
+if (code_version != global_version && tstate->tracing == 0) {
 if (_Py_Instrument(_PyFrame_GetCode(frame), tstate->interp)) {
 GOTO_ERROR(error);
 }
diff --git a/Python/ceval.c b/Python/ceval.c
index 06c136aeb252c9..41e9310938d826 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -800,17 +800,23 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, 
_PyInterpreterFrame *frame, int
 {
 _Py_CODEUNIT *prev = frame->instr_ptr;
 _Py_CODEUNIT *here = frame->instr_ptr = next_instr;
-_PyFrame_SetStackPointer(frame, stack_pointer);
-int original_opcode = _Py_call_instrumentation_line(
-tstate, frame, here, prev);
-stack_pointer = _PyFrame_GetStackPointer(frame);
-if (original_opcode < 0) {
-next_instr = here+1;
-goto error;
-}
-next_instr = frame->instr_ptr;
-if (next_instr != here) {
-DISPATCH();
+int original_opcode = 0;
+if (tstate->tracing) {
+PyCodeObject *code = _PyFrame_GetCode(frame);
+original_opcode = code->_co_monitoring->lines[(int)(here - 
_PyCode_CODE(code))].original_opcode;
+} else {
+_PyFrame_SetStackPointer(frame, stack_pointer);
+original_opcode = _Py_call_instrumentation_line(
+tstate, frame, here, prev);
+stack_poin

[Python-checkins] gh-112075: Remove compiler warning from apple clang (gh-115855)

2024-02-28 Thread corona10
https://github.com/python/cpython/commit/f58f8cef7445ea04a69ba3e2848fffdb6b72df91
commit: f58f8cef7445ea04a69ba3e2848fffdb6b72df91
branch: main
author: Donghee Na 
committer: corona10 
date: 2024-02-29T02:51:59+09:00
summary:

gh-112075: Remove compiler warning from apple clang (gh-115855)

files:
M Objects/dictobject.c

diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 5ae4c3dbea2380..58fe973bc7a036 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -5029,7 +5029,7 @@ dictiter_iternextkey(PyObject *self)
 
 PyObject *value;
 #ifdef Py_GIL_DISABLED
-if (!dictiter_iternext_threadsafe(d, self, &value, NULL) == 0) {
+if (dictiter_iternext_threadsafe(d, self, &value, NULL) < 0) {
 value = NULL;
 }
 #else
@@ -5152,7 +5152,7 @@ dictiter_iternextvalue(PyObject *self)
 
 PyObject *value;
 #ifdef Py_GIL_DISABLED
-if (!dictiter_iternext_threadsafe(d, self, NULL, &value) == 0) {
+if (dictiter_iternext_threadsafe(d, self, NULL, &value) < 0) {
 value = NULL;
 }
 #else



[Python-checkins] gh-115816: Improve internal symbols API in optimizer (#116028)

2024-02-28 Thread gvanrossum
https://github.com/python/cpython/commit/e2a3e4b7488aff6fdc704a0f258bc315e96c1d6e
commit: e2a3e4b7488aff6fdc704a0f258bc315e96c1d6e
branch: main
author: Guido van Rossum 
committer: gvanrossum 
date: 2024-02-28T17:55:56Z
summary:

gh-115816: Improve internal symbols API in optimizer (#116028)

- Any `sym_set_...` call that attempts to set conflicting information
  causes the symbol to become `bottom` (contradiction).
- All `sym_is...` and similar calls return false or NULL for `bottom`.
- Everything's tested.
- The tests still pass with `PYTHONUOPSOPTIMIZE=1`.

files:
M Include/internal/pycore_optimizer.h
M Python/optimizer_analysis.c
M Python/optimizer_bytecodes.c
M Python/optimizer_symbols.c

diff --git a/Include/internal/pycore_optimizer.h 
b/Include/internal/pycore_optimizer.h
index 425bd693fac53d..265eae4e290c38 100644
--- a/Include/internal/pycore_optimizer.h
+++ b/Include/internal/pycore_optimizer.h
@@ -27,12 +27,12 @@ extern PyTypeObject _PyUOpExecutor_Type;
 extern PyTypeObject _PyUOpOptimizer_Type;
 
 /* Symbols */
+/* See explanation in optimizer_symbols.c */
 
 struct _Py_UopsSymbol {
-int flags;
-PyTypeObject *typ;
-// constant propagated value (might be NULL)
-PyObject *const_val;
+int flags;  // 0 bits: Top; 2 or more bits: Bottom
+PyTypeObject *typ;  // Borrowed reference
+PyObject *const_val;  // Owned reference (!)
 };
 
 // Holds locals, stack, locals, stack ... co_consts (in that order)
@@ -92,7 +92,9 @@ extern _Py_UopsSymbol *_Py_uop_sym_new_const(_Py_UOpsContext 
*ctx, PyObject *con
 extern _Py_UopsSymbol *_Py_uop_sym_new_null(_Py_UOpsContext *ctx);
 extern bool _Py_uop_sym_matches_type(_Py_UopsSymbol *sym, PyTypeObject *typ);
 extern void _Py_uop_sym_set_null(_Py_UopsSymbol *sym);
-extern void _Py_uop_sym_set_type(_Py_UopsSymbol *sym, PyTypeObject *tp);
+extern void _Py_uop_sym_set_non_null(_Py_UopsSymbol *sym);
+extern void _Py_uop_sym_set_type(_Py_UopsSymbol *sym, PyTypeObject *typ);
+extern void _Py_uop_sym_set_const(_Py_UopsSymbol *sym, PyObject *const_val);
 
 extern int _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx);
 extern void _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx);
diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c
index b29a00c941e996..8e408ffbb1c2b5 100644
--- a/Python/optimizer_analysis.c
+++ b/Python/optimizer_analysis.c
@@ -294,7 +294,9 @@ remove_globals(_PyInterpreterFrame *frame, 
_PyUOpInstruction *buffer,
 #define sym_new_null _Py_uop_sym_new_null
 #define sym_matches_type _Py_uop_sym_matches_type
 #define sym_set_null _Py_uop_sym_set_null
+#define sym_set_non_null _Py_uop_sym_set_non_null
 #define sym_set_type _Py_uop_sym_set_type
+#define sym_set_const _Py_uop_sym_set_const
 #define frame_new _Py_uop_frame_new
 #define frame_pop _Py_uop_frame_pop
 
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index 68737389c66b67..b65e90bf980e5a 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -22,7 +22,9 @@ typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame;
 #define sym_new_null _Py_uop_sym_new_null
 #define sym_matches_type _Py_uop_sym_matches_type
 #define sym_set_null _Py_uop_sym_set_null
+#define sym_set_non_null _Py_uop_sym_set_non_null
 #define sym_set_type _Py_uop_sym_set_type
+#define sym_set_const _Py_uop_sym_set_const
 #define frame_new _Py_uop_frame_new
 #define frame_pop _Py_uop_frame_pop
 
diff --git a/Python/optimizer_symbols.c b/Python/optimizer_symbols.c
index 794d73733f85a7..158ee67d19f50e 100644
--- a/Python/optimizer_symbols.c
+++ b/Python/optimizer_symbols.c
@@ -10,11 +10,26 @@
 #include 
 #include 
 
+/* Symbols
+   ===
+
+   See the diagram at
+   
https://github.com/faster-cpython/ideas/blob/main/3.13/redundancy_eliminator.md
+
+   We represent the nodes in the diagram as follows
+   (the flag bits are only defined in optimizer_symbols.c):
+   - Top: no flag bits, typ and const_val are NULL.
+   - NULL: IS_NULL flag set, type and const_val NULL.
+   - Not NULL: NOT_NULL flag set, type and const_val NULL.
+   - None/not None: not used. (None could be represented as any other 
constant.)
+   - Known type: NOT_NULL flag set and typ set; const_val is NULL.
+   - Known constant: NOT_NULL flag set, type set, const_val set.
+   - Bottom: IS_NULL and NOT_NULL flags set, type and const_val NULL.
+ */
+
 // Flags for below.
-#define KNOWN  1 << 0
-#define TRUE_CONST 1 << 1
-#define IS_NULL1 << 2
-#define NOT_NULL   1 << 3
+#define IS_NULL1 << 0
+#define NOT_NULL   1 << 1
 
 #ifdef Py_DEBUG
 static inline int get_lltrace(void) {
@@ -31,9 +46,8 @@ static inline int get_lltrace(void) {
 #define DPRINTF(level, ...)
 #endif
 
-// Takes a borrowed reference to const_val, turns that into a strong reference.
 static _Py_UopsSymbol *
-sym_new(_Py_UOpsContext *ctx, PyObject *const_val)
+sym_new(_Py_UOpsContext *ctx)
 {
 _Py_UopsSymbol *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number];
 if (ctx->t_arena.ty_curr_number 

[Python-checkins] [3.11] gh-116034: fix location info on the error of a failed assertion (#116052)

2024-02-28 Thread iritkatriel
https://github.com/python/cpython/commit/99ab0d49e205cc3977205cc3b98d0b0224c1e642
commit: 99ab0d49e205cc3977205cc3b98d0b0224c1e642
branch: 3.11
author: Irit Katriel <1055913+iritkatr...@users.noreply.github.com>
committer: iritkatriel <1055913+iritkatr...@users.noreply.github.com>
date: 2024-02-28T19:12:16Z
summary:

[3.11] gh-116034: fix location info on the error of a failed assertion (#116052)

files:
A Misc/NEWS.d/next/Core and 
Builtins/2024-02-28-17-25-19.gh-issue-116034.-Uu9tf.rst
M Lib/test/test_traceback.py
M Python/compile.c
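
The user-visible effect is roughly the following (output shape taken from the
new test below; file name and line numbers are placeholders):

    def f_assert():
        test = 3
        assert test == 1 and test == 2, "Bug found?"

    f_assert()
    # Traceback (most recent call last):
    #   File "example.py", line 5, in <module>
    #     f_assert()
    #   File "example.py", line 3, in f_assert
    #     assert test == 1 and test == 2, "Bug found?"
    #            ^^^^^^^^^^^^^^^^^^^^^^^
    # AssertionError: Bug found?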

diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index a857d61e9b4a5f..16971a555af1a4 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -636,6 +636,23 @@ def f_with_binary_operator():
         result_lines = self.get_exception(f_with_binary_operator)
         self.assertEqual(result_lines, expected_error.splitlines())
 
+    def test_caret_for_failed_assertion(self):
+        def f_assert():
+            test = 3
+            assert test == 1 and test == 2, "Bug found?"
+
+        lineno_f = f_assert.__code__.co_firstlineno
+        expected_error = (
+            'Traceback (most recent call last):\n'
+            f'  File "{__file__}", line {self.callable_line}, in get_exception\n'
+            '    callable()\n'
+            f'  File "{__file__}", line {lineno_f+2}, in f_assert\n'
+            '    assert test == 1 and test == 2, "Bug found?"\n'
+            '           ^^^^^^^^^^^^^^^^^^^^^^^\n'
+        )
+        result_lines = self.get_exception(f_assert)
+        self.assertEqual(result_lines, expected_error.splitlines())
+
     def test_traceback_specialization_with_syntax_error(self):
         bytecode = compile("1 / 0 / 1 / 2\n", TESTFN, "exec")
 
diff --git a/Misc/NEWS.d/next/Core and 
Builtins/2024-02-28-17-25-19.gh-issue-116034.-Uu9tf.rst b/Misc/NEWS.d/next/Core 
and Builtins/2024-02-28-17-25-19.gh-issue-116034.-Uu9tf.rst
new file mode 100644
index 00..c711b63605d1a1
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and 
Builtins/2024-02-28-17-25-19.gh-issue-116034.-Uu9tf.rst 
@@ -0,0 +1 @@
+Fix location of the error on a failed assertion.
diff --git a/Python/compile.c b/Python/compile.c
index 558df3fca653ea..16bc0ed013c9f1 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -4050,6 +4050,7 @@ compiler_assert(struct compiler *c, stmt_ty s)
 ADDOP_I(c, PRECALL, 0);
 ADDOP_I(c, CALL, 0);
 }
+SET_LOC(c, s->v.Assert.test);
 ADDOP_I(c, RAISE_VARARGS, 1);
 compiler_use_next_block(c, end);
 return 1;



[Python-checkins] GH-115986 Reorder pprint docs and amend some references (#116019)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/6c1c94dc517b77afcebb25436a4b7b0d13b6eb4d
commit: 6c1c94dc517b77afcebb25436a4b7b0d13b6eb4d
branch: main
author: Kerim Kabirov <39376984+privat33r-...@users.noreply.github.com>
committer: erlend-aasland 
date: 2024-02-28T20:43:05+01:00
summary:

GH-115986 Reorder pprint docs and amend some references (#116019)

Introduce a new subsubsection, 'Functions', for module level functions,
and place it before the PrettyPrinter class reference.

Also:
- Fix pprint.pprint() references so they properly link to the module
  level function.
- Add links to sys.stdout.

files:
M Doc/library/pprint.rst
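
A small illustration of the pp()/pprint() difference documented in the diff
below (long-standing behaviour, not introduced by this change):

    import pprint

    d = {"b": 1, "a": 2}
    pprint.pp(d)        # {'b': 1, 'a': 2}  -- insertion order (sort_dicts=False)
    pprint.pprint(d)    # {'a': 2, 'b': 1}  -- keys sorted (sort_dicts=True)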

diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
index e883acd67d6c72..2a2eb098646364 100644
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -31,7 +31,93 @@ Dictionaries are sorted by key before the display is 
computed.
 .. versionchanged:: 3.10
Added support for pretty-printing :class:`dataclasses.dataclass`.
 
-The :mod:`pprint` module defines one class:
+.. _pprint-functions:
+
+Functions
+-
+
+.. function:: pp(object, *args, sort_dicts=False, **kwargs)
+
+   Prints the formatted representation of *object* followed by a newline.
+   If *sort_dicts* is false (the default), dictionaries will be displayed with
+   their keys in insertion order, otherwise the dict keys will be sorted.
+   *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting
+   parameters.
+
+   .. versionadded:: 3.8
+
+
+.. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \
+ compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Prints the formatted representation of *object* on *stream*, followed by a
+   newline.  If *stream* is ``None``, :data:`sys.stdout` is used. This may be 
used
+   in the interactive interpreter instead of the :func:`print` function for
+   inspecting values (you can even reassign ``print = pprint.pprint`` for use
+   within a scope).
+
+   The configuration parameters *stream*, *indent*, *width*, *depth*,
+   *compact*, *sort_dicts* and *underscore_numbers* are passed to the
+   :class:`PrettyPrinter` constructor and their meanings are as
+   described in its documentation above.
+
+  >>> import pprint
+  >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni']
+  >>> stuff.insert(0, stuff)
+  >>> pprint.pprint(stuff)
+  [<Recursion on list with id=...>,
+   'spam',
+   'eggs',
+   'lumberjack',
+   'knights',
+   'ni']
+
+.. function:: pformat(object, indent=1, width=80, depth=None, *, \
+  compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Return the formatted representation of *object* as a string.  *indent*,
+   *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are
+   passed to the :class:`PrettyPrinter` constructor as formatting parameters
+   and their meanings are as described in its documentation above.
+
+
+.. function:: isreadable(object)
+
+   .. index:: pair: built-in function; eval
+
+   Determine if the formatted representation of *object* is "readable", or can 
be
+   used to reconstruct the value using :func:`eval`.  This always returns 
``False``
+   for recursive objects.
+
+  >>> pprint.isreadable(stuff)
+  False
+
+
+.. function:: isrecursive(object)
+
+   Determine if *object* requires a recursive representation.  This function is
+   subject to the same limitations as noted in :func:`saferepr` below and may 
raise an
+   :exc:`RecursionError` if it fails to detect a recursive object.
+
+
+.. function:: saferepr(object)
+
+   Return a string representation of *object*, protected against recursion in
+   some common data structures, namely instances of :class:`dict`, 
:class:`list`
+   and :class:`tuple` or subclasses whose ``__repr__`` has not been 
overridden.  If the
+   representation of object exposes a recursive entry, the recursive reference
+   will be represented as ``<Recursion on typename with id=number>``.  The
+   representation is not otherwise formatted.
+
+   >>> pprint.saferepr(stuff)
+   "[, 'spam', 'eggs', 'lumberjack', 'knights', 
'ni']"
+
+.. _prettyprinter-objects:
+
+PrettyPrinter Objects
+-
+
+This module defines one class:
 
 .. First the implementation class:
 
@@ -44,9 +130,9 @@ The :mod:`pprint` module defines one class:
Construct a :class:`PrettyPrinter` instance.  This constructor understands
several keyword parameters.
 
-   *stream* (default ``sys.stdout``) is a :term:`file-like object` to
+   *stream* (default :data:`!sys.stdout`) is a :term:`file-like object` to
which the output will be written by calling its :meth:`!write` method.
-   If both *stream* and ``sys.stdout`` are ``None``, then
+   If both *stream* and :data:`!sys.stdout` a

[Python-checkins] [3.12] GH-115986 Reorder pprint docs and amend some references (GH-116019) (#116061)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/91e680b85cb3640660db3eccdd8d6f34dad2cfdc
commit: 91e680b85cb3640660db3eccdd8d6f34dad2cfdc
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: erlend-aasland 
date: 2024-02-28T21:06:08+01:00
summary:

[3.12] GH-115986 Reorder pprint docs and amend some references (GH-116019) 
(#116061)

GH-115986 Reorder pprint docs and amend some references (GH-116019)

Introduce a new subsubsection, 'Functions', for module level functions,
and place it before the PrettyPrinter class reference.

Also:
- Fix pprint.pprint() references so they properly link to the module
  level function.
- Add links to sys.stdout.
(cherry picked from commit 6c1c94dc517b77afcebb25436a4b7b0d13b6eb4d)

Co-authored-by: Kerim Kabirov <39376984+privat33r-...@users.noreply.github.com>

files:
M Doc/library/pprint.rst

diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
index e883acd67d6c72..2a2eb098646364 100644
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -31,7 +31,93 @@ Dictionaries are sorted by key before the display is 
computed.
 .. versionchanged:: 3.10
Added support for pretty-printing :class:`dataclasses.dataclass`.
 
-The :mod:`pprint` module defines one class:
+.. _pprint-functions:
+
+Functions
+-
+
+.. function:: pp(object, *args, sort_dicts=False, **kwargs)
+
+   Prints the formatted representation of *object* followed by a newline.
+   If *sort_dicts* is false (the default), dictionaries will be displayed with
+   their keys in insertion order, otherwise the dict keys will be sorted.
+   *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting
+   parameters.
+
+   .. versionadded:: 3.8
+
+
+.. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \
+ compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Prints the formatted representation of *object* on *stream*, followed by a
+   newline.  If *stream* is ``None``, :data:`sys.stdout` is used. This may be 
used
+   in the interactive interpreter instead of the :func:`print` function for
+   inspecting values (you can even reassign ``print = pprint.pprint`` for use
+   within a scope).
+
+   The configuration parameters *stream*, *indent*, *width*, *depth*,
+   *compact*, *sort_dicts* and *underscore_numbers* are passed to the
+   :class:`PrettyPrinter` constructor and their meanings are as
+   described in its documentation above.
+
+  >>> import pprint
+  >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni']
+  >>> stuff.insert(0, stuff)
+  >>> pprint.pprint(stuff)
+  [<Recursion on list with id=...>,
+   'spam',
+   'eggs',
+   'lumberjack',
+   'knights',
+   'ni']
+
+.. function:: pformat(object, indent=1, width=80, depth=None, *, \
+  compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Return the formatted representation of *object* as a string.  *indent*,
+   *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are
+   passed to the :class:`PrettyPrinter` constructor as formatting parameters
+   and their meanings are as described in its documentation above.
+
+
+.. function:: isreadable(object)
+
+   .. index:: pair: built-in function; eval
+
+   Determine if the formatted representation of *object* is "readable", or can 
be
+   used to reconstruct the value using :func:`eval`.  This always returns 
``False``
+   for recursive objects.
+
+  >>> pprint.isreadable(stuff)
+  False
+
+
+.. function:: isrecursive(object)
+
+   Determine if *object* requires a recursive representation.  This function is
+   subject to the same limitations as noted in :func:`saferepr` below and may 
raise an
+   :exc:`RecursionError` if it fails to detect a recursive object.
+
+
+.. function:: saferepr(object)
+
+   Return a string representation of *object*, protected against recursion in
+   some common data structures, namely instances of :class:`dict`, 
:class:`list`
+   and :class:`tuple` or subclasses whose ``__repr__`` has not been 
overridden.  If the
+   representation of object exposes a recursive entry, the recursive reference
+   will be represented as .  The
+   representation is not otherwise formatted.
+
+   >>> pprint.saferepr(stuff)
+   "[, 'spam', 'eggs', 'lumberjack', 'knights', 
'ni']"
+
+.. _prettyprinter-objects:
+
+PrettyPrinter Objects
+---------------------
+
+This module defines one class:
 
 .. First the implementation class:
 
@@ -44,9 +130,9 @@ The :mod:`pprint` module defines one class:
Construct a :class:`PrettyPrinter` instance.  This constructor understands
several keyword parameters.
 
-   *stream* (default ``sys.stdout``) is a :term:`file-like object` to
+   *stream* (default :data:`!sys.stdout`) is a :term:`file-like object` to
which the output will be written by calling its :meth:`!write` method.
-   If both *stream* and ``sys.stdout`` are ``None``, then
+   If both *stream* and :data:`!sys.stdout` are ``None``, then
+   :meth:`~PrettyPrinter.pprint` silently returns.

[Python-checkins] [3.11] GH-115986 Reorder pprint docs and amend some references (#116019) (#116064)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/02bb367748121b4c271ac07392820b5136db3571
commit: 02bb367748121b4c271ac07392820b5136db3571
branch: 3.11
author: Erlend E. Aasland 
committer: erlend-aasland 
date: 2024-02-28T20:11:12Z
summary:

[3.11] GH-115986 Reorder pprint docs and amend some references (#116019) 
(#116064)

(cherry picked from commit 6c1c94dc517b77afcebb25436a4b7b0d13b6eb4d)

Introduce a new subsubsection, 'Functions', for module level functions,
and place it before the PrettyPrinter class reference.

Also:
- Fix pprint.pprint() references so they properly link to the module
  level function.
- Add links to sys.stdout.

Co-authored-by: Kerim Kabirov <39376984+privat33r-...@users.noreply.github.com>

files:
M Doc/library/pprint.rst

diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
index fa5153284a2aab..e7f0bddc9606f7 100644
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -31,7 +31,90 @@ Dictionaries are sorted by key before the display is 
computed.
 .. versionchanged:: 3.10
Added support for pretty-printing :class:`dataclasses.dataclass`.
 
-The :mod:`pprint` module defines one class:
+.. _pprint-functions:
+
+Functions
+---------
+
+.. function:: pp(object, *args, sort_dicts=False, **kwargs)
+
+   Prints the formatted representation of *object* followed by a newline.
+   If *sort_dicts* is false (the default), dictionaries will be displayed with
+   their keys in insertion order, otherwise the dict keys will be sorted.
+   *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting
+   parameters.
+
+   .. versionadded:: 3.8
+
+
+.. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \
+ compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Prints the formatted representation of *object* on *stream*, followed by a
+   newline.  If *stream* is ``None``, :data:`sys.stdout` is used. This may be 
used
+   in the interactive interpreter instead of the :func:`print` function for
+   inspecting values (you can even reassign ``print = pprint.pprint`` for use
+   within a scope).
+
+   The configuration parameters *stream*, *indent*, *width*, *depth*,
+   *compact*, *sort_dicts* and *underscore_numbers* are passed to the
+   :class:`PrettyPrinter` constructor and their meanings are as
+   described in its documentation above.
+
+  >>> import pprint
+  >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni']
+  >>> stuff.insert(0, stuff)
+  >>> pprint.pprint(stuff)
+  [,
+   'spam',
+   'eggs',
+   'lumberjack',
+   'knights',
+   'ni']
+
+.. function:: pformat(object, indent=1, width=80, depth=None, *, \
+  compact=False, sort_dicts=True, underscore_numbers=False)
+
+   Return the formatted representation of *object* as a string.  *indent*,
+   *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are
+   passed to the :class:`PrettyPrinter` constructor as formatting parameters
+   and their meanings are as described in its documentation above.
+
+
+.. function:: isreadable(object)
+
+   .. index:: pair: built-in function; eval
+
+   Determine if the formatted representation of *object* is "readable", or can 
be
+   used to reconstruct the value using :func:`eval`.  This always returns 
``False``
+   for recursive objects.
+
+  >>> pprint.isreadable(stuff)
+  False
+
+
+.. function:: isrecursive(object)
+
+   Determine if *object* requires a recursive representation.
+
+
+.. function:: saferepr(object)
+
+   Return a string representation of *object*, protected against recursive data
+   structures.  If the representation of *object* exposes a recursive entry, 
the
+   recursive reference will be represented as .  The representation is not otherwise formatted.
+
+   >>> pprint.saferepr(stuff)
+   "[, 'spam', 'eggs', 'lumberjack', 'knights', 
'ni']"
+
+
+.. _prettyprinter-objects:
+
+PrettyPrinter Objects
+---------------------
+
+This module defines one class:
 
 .. First the implementation class:
 
@@ -44,9 +127,9 @@ The :mod:`pprint` module defines one class:
Construct a :class:`PrettyPrinter` instance.  This constructor understands
several keyword parameters.
 
-   *stream* (default ``sys.stdout``) is a :term:`file-like object` to
+   *stream* (default :data:`!sys.stdout`) is a :term:`file-like object` to
which the output will be written by calling its :meth:`!write` method.
-   If both *stream* and ``sys.stdout`` are ``None``, then
+   If both *stream* and :data:`!sys.stdout` are ``None``, then
:meth:`~PrettyPrinter.pprint` silently returns.
 
Other values configure the manner in which nesting of complex data
@@ -87,7 +170,7 @@ The :mod:`pprint` module defines one class:
   Added the *underscore_numbers* parameter.
 
.. versionchanged:: 3.11
-  No longer attempts to write to ``sys.stdout`` if it is ``None``.
+  No longer attempts to write to :data:`!sys.stdout` if it is ``None``.
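
As a quick illustration of the sort_dicts difference described in the reorganized
docs (a minimal sketch; any Python 3.8+ where pprint.pp() exists will do):

    import pprint

    d = {'b': 1, 'a': 2}
    pprint.pp(d)        # {'b': 1, 'a': 2}  - sort_dicts=False, insertion order kept
    pprint.pprint(d)    # {'a': 2, 'b': 1}  - sort_dicts=True, keys sorted
    print(pprint.pformat(d, sort_dicts=False))   # "{'b': 1, 'a': 2}"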
 
 

[Python-checkins] gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

2024-02-28 Thread gpshead
https://github.com/python/cpython/commit/c43b26d02eaa103756c250e8d36829d388c5f3be
commit: c43b26d02eaa103756c250e8d36829d388c5f3be
branch: main
author: Weii Wang 
committer: gpshead 
date: 2024-02-28T12:15:52-08:00
summary:

gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

Use of a proxy is intended to defer DNS resolution for the target hosts to the
proxy itself, rather than risk an information leak by having the client resolve
the names locally for any reason.  Proxy bypass lists are strictly name based,
and most proxy implementations agree on this.

files:
A Misc/NEWS.d/next/Library/2024-02-09-19-41-48.gh-issue-115197.20wkWH.rst
M Lib/test/test_urllib2.py
M Lib/urllib/request.py

diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index fa528a675892b5..739c15df13de21 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -15,10 +15,11 @@
 import subprocess
 
 import urllib.request
-# The proxy bypass method imported below has logic specific to the OSX
-# proxy config data structure but is testable on all platforms.
+# The proxy bypass method imported below has logic specific to the
+# corresponding system but is testable on all platforms.
 from urllib.request import (Request, OpenerDirector, HTTPBasicAuthHandler,
 HTTPPasswordMgrWithPriorAuth, _parse_proxy,
+_proxy_bypass_winreg_override,
 _proxy_bypass_macosx_sysconf,
 AbstractDigestAuthHandler)
 from urllib.parse import urlparse
@@ -1485,6 +1486,30 @@ def test_proxy_https_proxy_authorization(self):
 self.assertEqual(req.host, "proxy.example.com:3128")
 self.assertEqual(req.get_header("Proxy-authorization"), "FooBar")
 
+@unittest.skipUnless(os.name == "nt", "only relevant for Windows")
+def test_winreg_proxy_bypass(self):
+proxy_override = "www.example.com;*.example.net; 192.168.0.1"
+proxy_bypass = _proxy_bypass_winreg_override
+for host in ("www.example.com", "www.example.net", "192.168.0.1"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expected bypass of %s to be true" % host)
+
+for host in ("example.com", "www.example.org", "example.net",
+ "192.168.0.2"):
+self.assertFalse(proxy_bypass(host, proxy_override),
+ "expected bypass of %s to be False" % host)
+
+# check intranet address bypass
+proxy_override = "example.com; "
+self.assertTrue(proxy_bypass("example.com", proxy_override),
+"expected bypass of %s to be true" % host)
+self.assertFalse(proxy_bypass("example.net", proxy_override),
+ "expected bypass of %s to be False" % host)
+for host in ("test", "localhost"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expect  to bypass intranet address '%s'"
+% host)
+
 @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
 def test_osx_proxy_bypass(self):
 bypass = {
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index bca594420f6d9d..d22af6618d80f1 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -2563,6 +2563,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
 }
 """
 from fnmatch import fnmatch
+from ipaddress import AddressValueError, IPv4Address
 
 hostonly, port = _splitport(host)
 
@@ -2579,20 +2580,17 @@ def ip2num(ipAddr):
 return True
 
 hostIP = None
+try:
+hostIP = int(IPv4Address(hostonly))
+except AddressValueError:
+pass
 
 for value in proxy_settings.get('exceptions', ()):
 # Items in the list are strings like these: *.local, 169.254/16
 if not value: continue
 
 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
-if m is not None:
-if hostIP is None:
-try:
-hostIP = socket.gethostbyname(hostonly)
-hostIP = ip2num(hostIP)
-except OSError:
-continue
-
+if m is not None and hostIP is not None:
 base = ip2num(m.group(1))
 mask = m.group(2)
 if mask is None:
@@ -2615,6 +2613,31 @@ def ip2num(ipAddr):
 return False
 
 
+# Same as _proxy_bypass_macosx_sysconf, testable on all platforms
+def _proxy_bypass_winreg_override(host, override):
+"""Return True if the host should bypass the proxy server.
+
+The proxy override list is obtained from the Windows
+Internet settings proxy override registry value.
+
+An example of a proxy override value is:
+"www.example.com;*.example.net; 192.168.0.1"
+"""
+from fnmatch import fnmatch
+
+host, _ = _splitport(host)
+proxy_override = override.split(';')
+for test in proxy_overrid
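
A minimal sketch of how the new name-based helper is exercised, following the
accompanying test (assumes a build that includes this change; the helper is a
private function, not public API):

    from urllib.request import _proxy_bypass_winreg_override

    override = "www.example.com;*.example.net; 192.168.0.1"
    _proxy_bypass_winreg_override("www.example.net", override)   # True  (matches *.example.net)
    _proxy_bypass_winreg_override("example.net", override)       # False (no leading label before .example.net)
    _proxy_bypass_winreg_override("192.168.0.1", override)       # True  (exact entry)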

[Python-checkins] gh-112529: Simplify PyObject_GC_IsTracked and PyObject_GC_IsFinalized (#114732)

2024-02-28 Thread colesbury
https://github.com/python/cpython/commit/df5212df6c6f08308c68de4b3ed8a1b51ac6334b
commit: df5212df6c6f08308c68de4b3ed8a1b51ac6334b
branch: main
author: Sam Gross 
committer: colesbury 
date: 2024-02-28T15:37:59-05:00
summary:

gh-112529: Simplify PyObject_GC_IsTracked and PyObject_GC_IsFinalized (#114732)

files:
M Modules/clinic/gcmodule.c.h
M Modules/gcmodule.c
M Python/gc_free_threading.c

diff --git a/Modules/clinic/gcmodule.c.h b/Modules/clinic/gcmodule.c.h
index d50d170589a2cd..9fff4da616ba00 100644
--- a/Modules/clinic/gcmodule.c.h
+++ b/Modules/clinic/gcmodule.c.h
@@ -469,6 +469,25 @@ PyDoc_STRVAR(gc_is_tracked__doc__,
 #define GC_IS_TRACKED_METHODDEF\
 {"is_tracked", (PyCFunction)gc_is_tracked, METH_O, gc_is_tracked__doc__},
 
+static int
+gc_is_tracked_impl(PyObject *module, PyObject *obj);
+
+static PyObject *
+gc_is_tracked(PyObject *module, PyObject *obj)
+{
+PyObject *return_value = NULL;
+int _return_value;
+
+_return_value = gc_is_tracked_impl(module, obj);
+if ((_return_value == -1) && PyErr_Occurred()) {
+goto exit;
+}
+return_value = PyBool_FromLong((long)_return_value);
+
+exit:
+return return_value;
+}
+
 PyDoc_STRVAR(gc_is_finalized__doc__,
 "is_finalized($module, obj, /)\n"
 "--\n"
@@ -478,6 +497,25 @@ PyDoc_STRVAR(gc_is_finalized__doc__,
 #define GC_IS_FINALIZED_METHODDEF\
 {"is_finalized", (PyCFunction)gc_is_finalized, METH_O, 
gc_is_finalized__doc__},
 
+static int
+gc_is_finalized_impl(PyObject *module, PyObject *obj);
+
+static PyObject *
+gc_is_finalized(PyObject *module, PyObject *obj)
+{
+PyObject *return_value = NULL;
+int _return_value;
+
+_return_value = gc_is_finalized_impl(module, obj);
+if ((_return_value == -1) && PyErr_Occurred()) {
+goto exit;
+}
+return_value = PyBool_FromLong((long)_return_value);
+
+exit:
+return return_value;
+}
+
 PyDoc_STRVAR(gc_freeze__doc__,
 "freeze($module, /)\n"
 "--\n"
@@ -547,4 +585,4 @@ gc_get_freeze_count(PyObject *module, PyObject 
*Py_UNUSED(ignored))
 exit:
 return return_value;
 }
-/*[clinic end generated code: output=258f92524c1141fc input=a9049054013a1b77]*/
+/*[clinic end generated code: output=0a7e91917adcb937 input=a9049054013a1b77]*/
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 961165e16a0fee..9807d2e7d48a36 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -383,7 +383,7 @@ gc_get_stats_impl(PyObject *module)
 
 
 /*[clinic input]
-gc.is_tracked
+gc.is_tracked -> bool
 
 obj: object
 /
@@ -393,21 +393,15 @@ Returns true if the object is tracked by the garbage 
collector.
 Simple atomic objects will return false.
 [clinic start generated code]*/
 
-static PyObject *
-gc_is_tracked(PyObject *module, PyObject *obj)
-/*[clinic end generated code: output=14f0103423b28e31 input=d83057f170ea2723]*/
+static int
+gc_is_tracked_impl(PyObject *module, PyObject *obj)
+/*[clinic end generated code: output=91c8d086b7f47a33 input=423b98ec680c3126]*/
 {
-PyObject *result;
-
-if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj))
-result = Py_True;
-else
-result = Py_False;
-return Py_NewRef(result);
+return PyObject_GC_IsTracked(obj);
 }
 
 /*[clinic input]
-gc.is_finalized
+gc.is_finalized -> bool
 
 obj: object
 /
@@ -415,14 +409,11 @@ gc.is_finalized
 Returns true if the object has been already finalized by the GC.
 [clinic start generated code]*/
 
-static PyObject *
-gc_is_finalized(PyObject *module, PyObject *obj)
-/*[clinic end generated code: output=e1516ac119a918ed input=201d0c58f69ae390]*/
+static int
+gc_is_finalized_impl(PyObject *module, PyObject *obj)
+/*[clinic end generated code: output=401ff5d6fc660429 input=ca4d111c8f8c4e3a]*/
 {
-if (_PyObject_IS_GC(obj) && _PyGC_FINALIZED(obj)) {
- Py_RETURN_TRUE;
-}
-Py_RETURN_FALSE;
+return PyObject_GC_IsFinalized(obj);
 }
 
 /*[clinic input]
diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c
index 14790899825de1..d4fb50106093ee 100644
--- a/Python/gc_free_threading.c
+++ b/Python/gc_free_threading.c
@@ -1693,19 +1693,13 @@ PyObject_GC_Del(void *op)
 int
 PyObject_GC_IsTracked(PyObject* obj)
 {
-if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)) {
-return 1;
-}
-return 0;
+return _PyObject_GC_IS_TRACKED(obj);
 }
 
 int
 PyObject_GC_IsFinalized(PyObject *obj)
 {
-if (_PyObject_IS_GC(obj) && _PyGC_FINALIZED(obj)) {
- return 1;
-}
-return 0;
+return _PyGC_FINALIZED(obj);
 }
 
 struct custom_visitor_args {
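
At the Python level the behavior is unchanged; a minimal sketch of what the
simplified helpers report:

    import gc

    gc.is_tracked(42)          # False - simple atomic objects are not tracked
    gc.is_tracked([1, 2, 3])   # True  - containers are tracked by the collector

    class Node:
        def __del__(self):
            pass

    n = Node()
    gc.is_finalized(n)         # False - the object has not been finalized yet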



[Python-checkins] [3.12] gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

2024-02-28 Thread gpshead
https://github.com/python/cpython/commit/dec637a95367084d5cd298c1458447cad77546be
commit: dec637a95367084d5cd298c1458447cad77546be
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: gpshead 
date: 2024-02-28T20:47:25Z
summary:

[3.12] gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

Use of a proxy is intended to defer DNS resolution for the target hosts to the
proxy itself, rather than risk an information leak by having the client resolve
the names locally for any reason.  Proxy bypass lists are strictly name based,
and most proxy implementations agree on this.
(cherry picked from commit c43b26d02eaa103756c250e8d36829d388c5f3be)

Co-authored-by: Weii Wang 

files:
A Misc/NEWS.d/next/Library/2024-02-09-19-41-48.gh-issue-115197.20wkWH.rst
M Lib/test/test_urllib2.py
M Lib/urllib/request.py

diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 99c9e24994732f..8431b1018d5fa4 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -14,10 +14,11 @@
 import subprocess
 
 import urllib.request
-# The proxy bypass method imported below has logic specific to the OSX
-# proxy config data structure but is testable on all platforms.
+# The proxy bypass method imported below has logic specific to the
+# corresponding system but is testable on all platforms.
 from urllib.request import (Request, OpenerDirector, HTTPBasicAuthHandler,
 HTTPPasswordMgrWithPriorAuth, _parse_proxy,
+_proxy_bypass_winreg_override,
 _proxy_bypass_macosx_sysconf,
 AbstractDigestAuthHandler)
 from urllib.parse import urlparse
@@ -1483,6 +1484,30 @@ def test_proxy_https_proxy_authorization(self):
 self.assertEqual(req.host, "proxy.example.com:3128")
 self.assertEqual(req.get_header("Proxy-authorization"), "FooBar")
 
+@unittest.skipUnless(os.name == "nt", "only relevant for Windows")
+def test_winreg_proxy_bypass(self):
+proxy_override = "www.example.com;*.example.net; 192.168.0.1"
+proxy_bypass = _proxy_bypass_winreg_override
+for host in ("www.example.com", "www.example.net", "192.168.0.1"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expected bypass of %s to be true" % host)
+
+for host in ("example.com", "www.example.org", "example.net",
+ "192.168.0.2"):
+self.assertFalse(proxy_bypass(host, proxy_override),
+ "expected bypass of %s to be False" % host)
+
+# check intranet address bypass
+proxy_override = "example.com; "
+self.assertTrue(proxy_bypass("example.com", proxy_override),
+"expected bypass of %s to be true" % host)
+self.assertFalse(proxy_bypass("example.net", proxy_override),
+ "expected bypass of %s to be False" % host)
+for host in ("test", "localhost"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expect  to bypass intranet address '%s'"
+% host)
+
 @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
 def test_osx_proxy_bypass(self):
 bypass = {
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 5314b3f26021eb..7228a35534b638 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -2589,6 +2589,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
 }
 """
 from fnmatch import fnmatch
+from ipaddress import AddressValueError, IPv4Address
 
 hostonly, port = _splitport(host)
 
@@ -2605,20 +2606,17 @@ def ip2num(ipAddr):
 return True
 
 hostIP = None
+try:
+hostIP = int(IPv4Address(hostonly))
+except AddressValueError:
+pass
 
 for value in proxy_settings.get('exceptions', ()):
 # Items in the list are strings like these: *.local, 169.254/16
 if not value: continue
 
 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
-if m is not None:
-if hostIP is None:
-try:
-hostIP = socket.gethostbyname(hostonly)
-hostIP = ip2num(hostIP)
-except OSError:
-continue
-
+if m is not None and hostIP is not None:
 base = ip2num(m.group(1))
 mask = m.group(2)
 if mask is None:
@@ -2641,6 +2639,31 @@ def ip2num(ipAddr):
 return False
 
 
+# Same as _proxy_bypass_macosx_sysconf, testable on all platforms
+def _proxy_bypass_winreg_override(host, override):
+"""Return True if the host should bypass the proxy server.
+
+The proxy override list is obtained from the Windows
+Internet settings proxy override registry value.
+
+An e

[Python-checkins] gh-115891: Fix debug byte filling in free-threaded build (#116018)

2024-02-28 Thread DinoV
https://github.com/python/cpython/commit/75c6c05fea212330f4b0259602ffae1b2cb91be3
commit: 75c6c05fea212330f4b0259602ffae1b2cb91be3
branch: main
author: Sam Gross 
committer: DinoV 
date: 2024-02-28T12:50:09-08:00
summary:

gh-115891: Fix debug byte filling in free-threaded build (#116018)

The previous code had two bugs. First, the debug offset in the mimalloc
heap includes the two pymalloc debug words, but the pointer passed to
fill_mem_debug does not include them. Second, the current object heap is the
correct source for allocations, but not for deallocations.

files:
M Objects/obmalloc.c

diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 43427d4449eb1a..b2a2286ef22b66 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -2460,14 +2460,23 @@ write_size_t(void *p, size_t n)
 }
 
 static void
-fill_mem_debug(debug_alloc_api_t *api, void *data, int c, size_t nbytes)
+fill_mem_debug(debug_alloc_api_t *api, void *data, int c, size_t nbytes,
+   bool is_alloc)
 {
 #ifdef Py_GIL_DISABLED
 if (api->api_id == 'o') {
 // Don't overwrite the first few bytes of a PyObject allocation in the
 // free-threaded build
 _PyThreadStateImpl *tstate = (_PyThreadStateImpl 
*)_PyThreadState_GET();
-size_t debug_offset = 
tstate->mimalloc.current_object_heap->debug_offset;
+size_t debug_offset;
+if (is_alloc) {
+debug_offset = tstate->mimalloc.current_object_heap->debug_offset;
+}
+else {
+char *alloc = (char *)data - 2*SST;  // start of the allocation
+debug_offset = _mi_ptr_page(alloc)->debug_offset;
+}
+debug_offset -= 2*SST;  // account for pymalloc extra bytes
 if (debug_offset < nbytes) {
 memset((char *)data + debug_offset, c, nbytes - debug_offset);
 }
@@ -2553,7 +2562,7 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t 
nbytes)
 memset(p + SST + 1, PYMEM_FORBIDDENBYTE, SST-1);
 
 if (nbytes > 0 && !use_calloc) {
-fill_mem_debug(api, data, PYMEM_CLEANBYTE, nbytes);
+fill_mem_debug(api, data, PYMEM_CLEANBYTE, nbytes, true);
 }
 
 /* at tail, write pad (SST bytes) and serialno (SST bytes) */
@@ -2603,7 +2612,7 @@ _PyMem_DebugRawFree(void *ctx, void *p)
 nbytes = read_size_t(q);
 nbytes += PYMEM_DEBUG_EXTRA_BYTES - 2*SST;
 memset(q, PYMEM_DEADBYTE, 2*SST);
-fill_mem_debug(api, p, PYMEM_DEADBYTE, nbytes);
+fill_mem_debug(api, p, PYMEM_DEADBYTE, nbytes, false);
 api->alloc.free(api->alloc.ctx, q);
 }
 



[Python-checkins] [3.11] gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

2024-02-28 Thread gpshead
https://github.com/python/cpython/commit/e7dc85308f810bf3bc1d131878a8b65386da4a55
commit: e7dc85308f810bf3bc1d131878a8b65386da4a55
branch: 3.11
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: gpshead 
date: 2024-02-28T20:53:00Z
summary:

[3.11] gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

gh-115197: Stop resolving host in urllib.request proxy bypass (GH-115210)

Use of a proxy is intended to defer DNS resolution for the target hosts to the
proxy itself, rather than risk an information leak by having the client resolve
the names locally for any reason.  Proxy bypass lists are strictly name based,
and most proxy implementations agree on this.
(cherry picked from commit c43b26d02eaa103756c250e8d36829d388c5f3be)

Co-authored-by: Weii Wang 

files:
A Misc/NEWS.d/next/Library/2024-02-09-19-41-48.gh-issue-115197.20wkWH.rst
M Lib/test/test_urllib2.py
M Lib/urllib/request.py

diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 230b937645b53b..7229b9c9815fb0 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -14,10 +14,11 @@
 import subprocess
 
 import urllib.request
-# The proxy bypass method imported below has logic specific to the OSX
-# proxy config data structure but is testable on all platforms.
+# The proxy bypass method imported below has logic specific to the
+# corresponding system but is testable on all platforms.
 from urllib.request import (Request, OpenerDirector, HTTPBasicAuthHandler,
 HTTPPasswordMgrWithPriorAuth, _parse_proxy,
+_proxy_bypass_winreg_override,
 _proxy_bypass_macosx_sysconf,
 AbstractDigestAuthHandler)
 from urllib.parse import urlparse
@@ -1445,6 +1446,30 @@ def test_proxy_https_proxy_authorization(self):
 self.assertEqual(req.host, "proxy.example.com:3128")
 self.assertEqual(req.get_header("Proxy-authorization"), "FooBar")
 
+@unittest.skipUnless(os.name == "nt", "only relevant for Windows")
+def test_winreg_proxy_bypass(self):
+proxy_override = "www.example.com;*.example.net; 192.168.0.1"
+proxy_bypass = _proxy_bypass_winreg_override
+for host in ("www.example.com", "www.example.net", "192.168.0.1"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expected bypass of %s to be true" % host)
+
+for host in ("example.com", "www.example.org", "example.net",
+ "192.168.0.2"):
+self.assertFalse(proxy_bypass(host, proxy_override),
+ "expected bypass of %s to be False" % host)
+
+# check intranet address bypass
+proxy_override = "example.com; "
+self.assertTrue(proxy_bypass("example.com", proxy_override),
+"expected bypass of %s to be true" % host)
+self.assertFalse(proxy_bypass("example.net", proxy_override),
+ "expected bypass of %s to be False" % host)
+for host in ("test", "localhost"):
+self.assertTrue(proxy_bypass(host, proxy_override),
+"expect  to bypass intranet address '%s'"
+% host)
+
 @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
 def test_osx_proxy_bypass(self):
 bypass = {
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 24911bb0190b8e..d98ba5dd1983b9 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -2577,6 +2577,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
 }
 """
 from fnmatch import fnmatch
+from ipaddress import AddressValueError, IPv4Address
 
 hostonly, port = _splitport(host)
 
@@ -2593,20 +2594,17 @@ def ip2num(ipAddr):
 return True
 
 hostIP = None
+try:
+hostIP = int(IPv4Address(hostonly))
+except AddressValueError:
+pass
 
 for value in proxy_settings.get('exceptions', ()):
 # Items in the list are strings like these: *.local, 169.254/16
 if not value: continue
 
 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
-if m is not None:
-if hostIP is None:
-try:
-hostIP = socket.gethostbyname(hostonly)
-hostIP = ip2num(hostIP)
-except OSError:
-continue
-
+if m is not None and hostIP is not None:
 base = ip2num(m.group(1))
 mask = m.group(2)
 if mask is None:
@@ -2629,6 +2627,31 @@ def ip2num(ipAddr):
 return False
 
 
+# Same as _proxy_bypass_macosx_sysconf, testable on all platforms
+def _proxy_bypass_winreg_override(host, override):
+"""Return True if the host should bypass the proxy server.
+
+The proxy override list is obtained from the Windows
+Internet settings proxy override registry value.
+
+An e

[Python-checkins] gh-115859: Re-enable T2 optimizer pass by default (#116062)

2024-02-28 Thread gvanrossum
https://github.com/python/cpython/commit/3409bc29c9f06051c28ae0791155e3aebd76ff2d
commit: 3409bc29c9f06051c28ae0791155e3aebd76ff2d
branch: main
author: Guido van Rossum 
committer: gvanrossum 
date: 2024-02-28T22:38:01Z
summary:

gh-115859: Re-enable T2 optimizer pass by default (#116062)

This undoes the *temporary* default disabling of the T2 optimizer pass in 
gh-115860.

- Add a new test that reproduces Brandt's example from gh-115859; it indeed 
crashes before gh-116028 with PYTHONUOPSOPTIMIZE=1
- Re-enable the optimizer pass in T2, stop checking PYTHONUOPSOPTIMIZE
- Rename the env var to disable T2 entirely to PYTHON_UOPS_OPTIMIZE (must be 
explicitly set to 0 to disable)
- Fix skipIf conditions on tests in test_opt.py accordingly
- Export sym_is_bottom() (for debugging)
- Fix various things in the `_BINARY_OP_` specializations in the abstract 
interpreter:
  - DECREF(temp)
  - out-of-space check after sym_new_const()
  - add sym_matches_type() checks, so even if we somehow reach a binary op with 
symbolic constants of the wrong type on the stack we won't trigger the type 
assert

files:
M Include/internal/pycore_optimizer.h
M Lib/test/test_capi/test_opt.py
M Python/optimizer.c
M Python/optimizer_analysis.c
M Python/optimizer_bytecodes.c
M Python/optimizer_cases.c.h
M Python/optimizer_symbols.c

diff --git a/Include/internal/pycore_optimizer.h 
b/Include/internal/pycore_optimizer.h
index 265eae4e290c38..614850468ec1d3 100644
--- a/Include/internal/pycore_optimizer.h
+++ b/Include/internal/pycore_optimizer.h
@@ -95,6 +95,8 @@ extern void _Py_uop_sym_set_null(_Py_UopsSymbol *sym);
 extern void _Py_uop_sym_set_non_null(_Py_UopsSymbol *sym);
 extern void _Py_uop_sym_set_type(_Py_UopsSymbol *sym, PyTypeObject *typ);
 extern void _Py_uop_sym_set_const(_Py_UopsSymbol *sym, PyObject *const_val);
+extern bool _Py_uop_sym_is_bottom(_Py_UopsSymbol *sym);
+
 
 extern int _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx);
 extern void _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx);
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index 25fc36dec93ddc..e1aef21b2c7644 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -210,6 +210,8 @@ def f():
 exe = get_first_executor(f)
 self.assertIsNone(exe)
 
+
+@unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop 
optimizer to run.")
 class TestUops(unittest.TestCase):
 
 def test_basic_loop(self):
@@ -570,7 +572,7 @@ def testfunc(n):
 self.assertLessEqual(count, 2)
 
 
-@unittest.skipIf(os.getenv("PYTHONUOPSOPTIMIZE", default=0) == 0, "Needs uop 
optimizer to run.")
+@unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop 
optimizer to run.")
 class TestUopsOptimization(unittest.TestCase):
 
 def _run_with_optimizer(self, testfunc, arg):
@@ -890,5 +892,22 @@ def testfunc(n):
 self.assertLessEqual(len(guard_both_float_count), 1)
 self.assertIn("_COMPARE_OP_STR", uops)
 
+def test_type_inconsistency(self):
+def testfunc(n):
+for i in range(n):
+x = _test_global + _test_global
+# Must be a real global else it won't be optimized to 
_LOAD_CONST_INLINE
+global _test_global
+_test_global = 0
+_, ex = self._run_with_optimizer(testfunc, 16)
+self.assertIsNone(ex)
+_test_global = 1.2
+_, ex = self._run_with_optimizer(testfunc, 16)
+self.assertIsNotNone(ex)
+uops = get_opnames(ex)
+self.assertIn("_GUARD_BOTH_INT", uops)
+self.assertIn("_BINARY_OP_ADD_INT", uops)
+
+
 if __name__ == "__main__":
 unittest.main()
diff --git a/Python/optimizer.c b/Python/optimizer.c
index c04ee17ee2171d..acd6d52c4a885f 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -1008,8 +1008,8 @@ uop_optimize(
 return err;
 }
 OPT_STAT_INC(traces_created);
-char *uop_optimize = Py_GETENV("PYTHONUOPSOPTIMIZE");
-if (uop_optimize == NULL || *uop_optimize > '0') {
+char *env_var = Py_GETENV("PYTHON_UOPS_OPTIMIZE");
+if (env_var == NULL || *env_var == '\0' || *env_var > '0') {
 err = _Py_uop_analyze_and_optimize(frame, buffer,
UOP_MAX_TRACE_LENGTH,
curr_stackentries, &dependencies);
diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c
index 8e408ffbb1c2b5..2a7ef4ec919eeb 100644
--- a/Python/optimizer_analysis.c
+++ b/Python/optimizer_analysis.c
@@ -297,6 +297,7 @@ remove_globals(_PyInterpreterFrame *frame, 
_PyUOpInstruction *buffer,
 #define sym_set_non_null _Py_uop_sym_set_non_null
 #define sym_set_type _Py_uop_sym_set_type
 #define sym_set_const _Py_uop_sym_set_const
+#define sym_is_bottom _Py_uop_sym_is_bottom
 #define frame_new _Py_uop_frame_new
 #define frame_pop _Py_uop_frame_pop
 
@@ -510,12 +511,9 @@ _Py_uop_analyze_and_optimize(
 
 peephole_opt(frame, buffer, buffer_size);
 
-
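
A minimal sketch of opting out after this change (assumes a build where the
uops optimizer is active; the variable now has to be set to 0 explicitly):

    import os, subprocess, sys

    env = dict(os.environ, PYTHON_UOPS_OPTIMIZE="0")   # disables only the T2 optimization pass
    subprocess.run([sys.executable, "-c", "print('T2 pass disabled for this run')"],
                   env=env, check=True)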

[Python-checkins] gh-112075: Use relaxed stores for places where we may race with when reading lock-free (#115786)

2024-02-28 Thread DinoV
https://github.com/python/cpython/commit/81c79961d2ee27dec90dbc0b72dfca7a5b27de7a
commit: 81c79961d2ee27dec90dbc0b72dfca7a5b27de7a
branch: main
author: Dino Viehland 
committer: DinoV 
date: 2024-02-28T14:53:19-08:00
summary:

gh-112075: Use relaxed stores for places where we may race with when reading 
lock-free (#115786)

files:
M Objects/dictobject.c

diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 58fe973bc7a036..5016e255f70ef9 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -250,6 +250,14 @@ load_keys_nentries(PyDictObject *mp)
 
 #endif
 
+#define STORE_KEY(ep, key) FT_ATOMIC_STORE_PTR_RELEASE(ep->me_key, key)
+#define STORE_VALUE(ep, value) FT_ATOMIC_STORE_PTR_RELEASE(ep->me_value, value)
+#define STORE_SPLIT_VALUE(mp, idx, value) 
FT_ATOMIC_STORE_PTR_RELEASE(mp->ma_values->values[idx], value)
+#define STORE_HASH(ep, hash) FT_ATOMIC_STORE_SSIZE_RELAXED(ep->me_hash, hash)
+#define STORE_KEYS_USABLE(keys, usable) 
FT_ATOMIC_STORE_SSIZE_RELAXED(keys->dk_usable, usable)
+#define STORE_KEYS_NENTRIES(keys, nentries) 
FT_ATOMIC_STORE_SSIZE_RELAXED(keys->dk_nentries, nentries)
+#define STORE_USED(mp, used) FT_ATOMIC_STORE_SSIZE_RELAXED(mp->ma_used, used)
+
 #define PERTURB_SHIFT 5
 
 /*
@@ -1621,7 +1629,6 @@ insert_into_splitdictkeys(PyDictKeysObject *keys, 
PyObject *name)
 return ix;
 }
 
-
 static inline int
 insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp,
  Py_hash_t hash, PyObject *key, PyObject *value)
@@ -1639,18 +1646,18 @@ insert_combined_dict(PyInterpreterState *interp, 
PyDictObject *mp,
 if (DK_IS_UNICODE(mp->ma_keys)) {
 PyDictUnicodeEntry *ep;
 ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries];
-ep->me_key = key;
-ep->me_value = value;
+STORE_KEY(ep, key);
+STORE_VALUE(ep, value);
 }
 else {
 PyDictKeyEntry *ep;
 ep = &DK_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries];
-ep->me_key = key;
-ep->me_hash = hash;
-ep->me_value = value;
+STORE_KEY(ep, key);
+STORE_VALUE(ep, value);
+STORE_HASH(ep, hash);
 }
-mp->ma_keys->dk_usable--;
-mp->ma_keys->dk_nentries++;
+STORE_KEYS_USABLE(mp->ma_keys, mp->ma_keys->dk_usable - 1);
+STORE_KEYS_NENTRIES(mp->ma_keys, mp->ma_keys->dk_nentries + 1);
 assert(mp->ma_keys->dk_usable >= 0);
 return 0;
 }
@@ -1682,7 +1689,7 @@ insert_split_dict(PyInterpreterState *interp, 
PyDictObject *mp,
 Py_ssize_t index = keys->dk_nentries;
 _PyDictValues_AddToInsertionOrder(mp->ma_values, index);
 assert (mp->ma_values->values[index] == NULL);
-mp->ma_values->values[index] = value;
+STORE_SPLIT_VALUE(mp, index, value);
 
 split_keys_entry_added(keys);
 assert(keys->dk_usable >= 0);
@@ -2013,8 +2020,8 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp,
 }
 }
 
-mp->ma_keys->dk_usable -= numentries;
-mp->ma_keys->dk_nentries = numentries;
+STORE_KEYS_USABLE(mp->ma_keys, mp->ma_keys->dk_usable - numentries);
+STORE_KEYS_NENTRIES(mp->ma_keys, numentries);
 ASSERT_CONSISTENT(mp);
 return 0;
 }
@@ -2507,15 +2514,15 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, 
Py_ssize_t ix,
 if (DK_IS_UNICODE(mp->ma_keys)) {
 PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[ix];
 old_key = ep->me_key;
-ep->me_key = NULL;
-ep->me_value = NULL;
+STORE_KEY(ep, NULL);
+STORE_VALUE(ep, NULL);
 }
 else {
 PyDictKeyEntry *ep = &DK_ENTRIES(mp->ma_keys)[ix];
 old_key = ep->me_key;
-ep->me_key = NULL;
-ep->me_value = NULL;
-ep->me_hash = 0;
+STORE_KEY(ep, NULL);
+STORE_VALUE(ep, NULL);
+STORE_HASH(ep, 0);
 }
 Py_DECREF(old_key);
 }
@@ -4393,8 +4400,8 @@ dict_popitem_impl(PyDictObject *self)
 PyTuple_SET_ITEM(res, 0, key);
 PyTuple_SET_ITEM(res, 1, value);
 /* We can't dk_usable++ since there is DKIX_DUMMY in indices */
-self->ma_keys->dk_nentries = i;
-self->ma_used--;
+STORE_KEYS_NENTRIES(self->ma_keys, i);
+STORE_USED(self, self->ma_used - 1);
 self->ma_version_tag = new_version;
 ASSERT_CONSISTENT(self);
 return res;



[Python-checkins] Improve all_equal() recipe (gh-116081)

2024-02-28 Thread rhettinger
https://github.com/python/cpython/commit/67c19e57b5c928278ebd191a545979ce786f06b3
commit: 67c19e57b5c928278ebd191a545979ce786f06b3
branch: main
author: Raymond Hettinger 
committer: rhettinger 
date: 2024-02-28T17:04:56-06:00
summary:

Improve all_equal() recipe (gh-116081)

Replace conjunction of next() calls with simpler len()/take() logic. Add a key
function parameter.

files:
M Doc/library/itertools.rst

diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 42e70404b306b0..4e731fefe8908d 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -863,10 +863,9 @@ which incur interpreter overhead.
"Given a predicate that returns True or False, count the True results."
return sum(map(pred, iterable))
 
-   def all_equal(iterable):
+   def all_equal(iterable, key=None):
"Returns True if all the elements are equal to each other."
-   g = groupby(iterable)
-   return next(g, True) and not next(g, False)
+   return len(take(2, groupby(iterable, key))) <= 1
 
def first_true(iterable, default=False, pred=None):
"""Returns the first true value in the iterable.
@@ -1225,6 +1224,8 @@ The following recipes have a more mathematical flavor:
 
 >>> [all_equal(s) for s in ('', 'A', '', 'AAAB', 'AAABA')]
 [True, True, True, False, False]
+>>> [all_equal(s, key=str.casefold) for s in ('', 'A', 'AaAa', 'AAAB', 
'AAABA')]
+[True, True, True, False, False]
 
 >>> quantify(range(99), lambda x: x%2==0)
 50



[Python-checkins] gh-76785: Update test.support.interpreters to Align With PEP 734 (gh-115566)

2024-02-28 Thread ericsnowcurrently
https://github.com/python/cpython/commit/e80abd57a82ea1beae0a82423d45c6eb8c5c5c74
commit: e80abd57a82ea1beae0a82423d45c6eb8c5c5c74
branch: main
author: Eric Snow 
committer: ericsnowcurrently 
date: 2024-02-28T16:08:08-07:00
summary:

gh-76785: Update test.support.interpreters to Align With PEP 734 (gh-115566)

This brings the code under test.support.interpreters, and the corresponding 
extension modules, in line with recent updates to PEP 734.

(Note: PEP 734 has not been accepted at this time.  However, we are using an 
internal copy of the implementation in the test suite to exercise the existing 
subinterpreters feature.)

files:
M Lib/test/support/interpreters/__init__.py
M Lib/test/support/interpreters/queues.py
M Lib/test/test_interpreters/test_api.py
M Lib/test/test_interpreters/test_channels.py
M Lib/test/test_interpreters/test_lifecycle.py
M Lib/test/test_interpreters/test_queues.py
M Lib/test/test_interpreters/utils.py
M Lib/test/test_sys.py
M Lib/test/test_threading.py
M Modules/_xxinterpqueuesmodule.c
M Modules/_xxsubinterpretersmodule.c

diff --git a/Lib/test/support/interpreters/__init__.py 
b/Lib/test/support/interpreters/__init__.py
index 15a908e9663593..d02ffbae1113c0 100644
--- a/Lib/test/support/interpreters/__init__.py
+++ b/Lib/test/support/interpreters/__init__.py
@@ -6,7 +6,7 @@
 
 # aliases:
 from _xxsubinterpreters import (
-InterpreterError, InterpreterNotFoundError,
+InterpreterError, InterpreterNotFoundError, NotShareableError,
 is_shareable,
 )
 
@@ -14,7 +14,8 @@
 __all__ = [
 'get_current', 'get_main', 'create', 'list_all', 'is_shareable',
 'Interpreter',
-'InterpreterError', 'InterpreterNotFoundError', 'ExecFailure',
+'InterpreterError', 'InterpreterNotFoundError', 'ExecutionFailed',
+'NotShareableError',
 'create_queue', 'Queue', 'QueueEmpty', 'QueueFull',
 ]
 
@@ -42,7 +43,11 @@ def __getattr__(name):
 {formatted}
 """.strip()
 
-class ExecFailure(RuntimeError):
+class ExecutionFailed(RuntimeError):
+"""An unhandled exception happened during execution.
+
+This is raised from Interpreter.exec() and Interpreter.call().
+"""
 
 def __init__(self, excinfo):
 msg = excinfo.formatted
@@ -157,7 +162,7 @@ def prepare_main(self, ns=None, /, **kwargs):
 ns = dict(ns, **kwargs) if ns is not None else kwargs
 _interpreters.set___main___attrs(self._id, ns)
 
-def exec_sync(self, code, /):
+def exec(self, code, /):
 """Run the given source code in the interpreter.
 
 This is essentially the same as calling the builtin "exec"
@@ -166,10 +171,10 @@ def exec_sync(self, code, /):
 
 There is no return value.
 
-If the code raises an unhandled exception then an ExecFailure
-is raised, which summarizes the unhandled exception.  The actual
-exception is discarded because objects cannot be shared between
-interpreters.
+If the code raises an unhandled exception then an ExecutionFailed
+exception is raised, which summarizes the unhandled exception.
+The actual exception is discarded because objects cannot be
+shared between interpreters.
 
 This blocks the current Python thread until done.  During
 that time, the previous interpreter is allowed to run
@@ -177,11 +182,35 @@ def exec_sync(self, code, /):
 """
 excinfo = _interpreters.exec(self._id, code)
 if excinfo is not None:
-raise ExecFailure(excinfo)
+raise ExecutionFailed(excinfo)
+
+def call(self, callable, /):
+"""Call the object in the interpreter with given args/kwargs.
+
+Only functions that take no arguments and have no closure
+are supported.
 
-def run(self, code, /):
+The return value is discarded.
+
+If the callable raises an exception then the error display
+(including full traceback) is send back between the interpreters
+and an ExecutionFailed exception is raised, much like what
+happens with Interpreter.exec().
+"""
+# XXX Support args and kwargs.
+# XXX Support arbitrary callables.
+# XXX Support returning the return value (e.g. via pickle).
+excinfo = _interpreters.call(self._id, callable)
+if excinfo is not None:
+raise ExecutionFailed(excinfo)
+
+def call_in_thread(self, callable, /):
+"""Return a new thread that calls the object in the interpreter.
+
+The return value and any raised exception are discarded.
+"""
 def task():
-self.exec_sync(code)
+self.call(callable)
 t = threading.Thread(target=task)
 t.start()
 return t
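
Taken together, the renamed methods read like this; a minimal sketch
(test.support.interpreters is an internal test-support module and PEP 734 is
still pending, so none of this is public API):

    from test.support import interpreters

    interp = interpreters.create()
    interp.prepare_main(x=40)
    interp.exec("print('x + 2 =', x + 2)")   # was exec_sync(); raises ExecutionFailed on error

call() and call_in_thread() currently accept only argument-free, closure-free
callables, per the XXX notes above.
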
diff --git a/Lib/test/support/interpreters/queues.py 
b/Lib/test/support/interpreters/queues.py
index aead0c40ca9667..2cc616be337a50 100644
--- a/Lib/test/support/interpreters/queues.py
+++ b/Lib/test/support/interpreters/queues.py
@@ -1,5 +1,6 @@
 """Cross-in

[Python-checkins] Update an out-of-date example in the itertools recipe intro (gh-116082)

2024-02-28 Thread rhettinger
https://github.com/python/cpython/commit/f484a2a7486d0b4c7c11901f6c668eb23b74e81f
commit: f484a2a7486d0b4c7c11901f6c668eb23b74e81f
branch: main
author: Raymond Hettinger 
committer: rhettinger 
date: 2024-02-28T17:11:05-06:00
summary:

Update an out-of-date example in the itertools recipe intro (gh-116082)

files:
M Doc/library/itertools.rst

diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 4e731fefe8908d..c26f6c89b4920a 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -778,7 +778,7 @@ The primary purpose of the itertools recipes is 
educational.  The recipes show
 various ways of thinking about individual tools — for example, that
 ``chain.from_iterable`` is related to the concept of flattening.  The recipes
 also give ideas about ways that the tools can be combined — for example, how
-``compress()`` and ``range()`` can work together.  The recipes also show 
patterns
+``starmap()`` and ``repeat()`` can work together.  The recipes also show 
patterns
 for using itertools with the :mod:`operator` and :mod:`collections` modules as
 well as with the built-in itertools such as ``map()``, ``filter()``,
 ``reversed()``, and ``enumerate()``.
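
One combination of starmap() and repeat() is the repeatfunc() recipe, for
example; a short sketch:

    from itertools import starmap, repeat

    def repeatfunc(func, times=None, *args):
        "Repeat calls to func with specified arguments (itertools recipe)."
        if times is None:
            return starmap(func, repeat(args))
        return starmap(func, repeat(args, times))

    list(repeatfunc(pow, 3, 2, 5))   # [32, 32, 32]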



[Python-checkins] [3.12] Improve all_equal() recipe (gh-116081) (gh-116083)

2024-02-28 Thread rhettinger
https://github.com/python/cpython/commit/a85e512699512893f34329b9ddab5ec5ad8a19bb
commit: a85e512699512893f34329b9ddab5ec5ad8a19bb
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: rhettinger 
date: 2024-02-28T17:11:46-06:00
summary:

[3.12] Improve all_equal() recipe (gh-116081) (gh-116083)

files:
M Doc/library/itertools.rst

diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 8ae7a304ff12f4..82d24a752a4aaa 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -855,10 +855,9 @@ which incur interpreter overhead.
"Given a predicate that returns True or False, count the True results."
return sum(map(pred, iterable))
 
-   def all_equal(iterable):
+   def all_equal(iterable, key=None):
"Returns True if all the elements are equal to each other."
-   g = groupby(iterable)
-   return next(g, True) and not next(g, False)
+   return len(take(2, groupby(iterable, key))) <= 1
 
def first_true(iterable, default=False, pred=None):
"""Returns the first true value in the iterable.
@@ -1217,6 +1216,8 @@ The following recipes have a more mathematical flavor:
 
 >>> [all_equal(s) for s in ('', 'A', '', 'AAAB', 'AAABA')]
 [True, True, True, False, False]
+>>> [all_equal(s, key=str.casefold) for s in ('', 'A', 'AaAa', 'AAAB', 
'AAABA')]
+[True, True, True, False, False]
 
 >>> quantify(range(99), lambda x: x%2==0)
 50



[Python-checkins] gh-116075: Skip test_external_inspection on qemu in JIT CI (#116076)

2024-02-28 Thread erlend-aasland
https://github.com/python/cpython/commit/4d1d35b906010c6db15f54443a9701c20af1db2d
commit: 4d1d35b906010c6db15f54443a9701c20af1db2d
branch: main
author: Erlend E. Aasland 
committer: erlend-aasland 
date: 2024-02-28T23:16:01Z
summary:

gh-116075: Skip test_external_inspection on qemu in JIT CI (#116076)

files:
M .github/workflows/jit.yml

diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml
index 21d4603b8679ea..43d0b2c1b4016c 100644
--- a/.github/workflows/jit.yml
+++ b/.github/workflows/jit.yml
@@ -70,13 +70,13 @@ jobs:
 runner: ubuntu-latest
 compiler: gcc
 # These fail because of emulation, not because of the JIT:
-exclude: test_unix_events test_init test_process_pool 
test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os 
test_perf_profiler test_posix test_signal test_socket test_subprocess 
test_threading test_venv
+exclude: test_unix_events test_init test_process_pool 
test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os 
test_perf_profiler test_posix test_signal test_socket test_subprocess 
test_threading test_venv test_external_inspection
   - target: aarch64-unknown-linux-gnu/clang
 architecture: aarch64
 runner: ubuntu-latest
 compiler: clang
 # These fail because of emulation, not because of the JIT:
-exclude: test_unix_events test_init test_process_pool 
test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os 
test_perf_profiler test_posix test_signal test_socket test_subprocess 
test_threading test_venv
+exclude: test_unix_events test_init test_process_pool 
test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os 
test_perf_profiler test_posix test_signal test_socket test_subprocess 
test_threading test_venv test_external_inspection
 env:
   CC: ${{ matrix.compiler }}
 steps:



[Python-checkins] [3.12] gh-115821: [Enum] better error message for calling super().__new__() (GH-116065)

2024-02-28 Thread ethanfurman
https://github.com/python/cpython/commit/83ff92d225816bc23073e9d5a57cc117a4f25714
commit: 83ff92d225816bc23073e9d5a57cc117a4f25714
branch: 3.12
author: Ethan Furman 
committer: ethanfurman 
date: 2024-02-28T15:17:20-08:00
summary:

[3.12] gh-115821: [Enum] better error message for calling super().__new__() 
(GH-116065)

Add note to not call super().__new__() in docs.

files:
M Doc/library/enum.rst

diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 4719c462a49c55..a03961b9a1ebce 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -390,6 +390,9 @@ Data Types
 
   results in the call ``int('1a', 16)`` and a value of ``17`` for the 
member.
 
+  ..note:: When writing a custom ``__new__``, do not use 
``super().__new__`` --
+   call the appropriate ``__new__`` instead.
+
.. method:: Enum.__repr__(self)
 
   Returns the string used for *repr()* calls.  By default, returns the



[Python-checkins] gh-115821: [Enum] better error message for calling super().__new__() (GH-116063)

2024-02-28 Thread ethanfurman
https://github.com/python/cpython/commit/3ea78fd5bc93fc339ef743e6a5dfde35f04d972e
commit: 3ea78fd5bc93fc339ef743e6a5dfde35f04d972e
branch: main
author: Ethan Furman 
committer: ethanfurman 
date: 2024-02-28T15:17:49-08:00
summary:

gh-115821: [Enum] better error message for calling super().__new__() (GH-116063)

Docs now state to not call super().__new__().
If super().__new__() is called, a better error message is now used.

files:
A Misc/NEWS.d/next/Library/2024-02-28-12-14-31.gh-issue-115821.YO2vKA.rst
M Doc/library/enum.rst
M Lib/enum.py
M Lib/test/test_enum.py

diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 30d80ce8d488cc..6e7de004cd52a1 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -400,6 +400,9 @@ Data Types
 
   results in the call ``int('1a', 16)`` and a value of ``17`` for the 
member.
 
+  ..note:: When writing a custom ``__new__``, do not use 
``super().__new__`` --
+   call the appropriate ``__new__`` instead.
+
.. method:: Enum.__repr__(self)
 
   Returns the string used for *repr()* calls.  By default, returns the
diff --git a/Lib/enum.py b/Lib/enum.py
index d10b99615981ba..22963cca4466f2 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -547,7 +547,10 @@ def __new__(metacls, cls, bases, classdict, *, 
boundary=None, _simple=False, **k
 classdict['_inverted_'] = None
 try:
 exc = None
+classdict['_%s__in_progress' % cls] = True
 enum_class = super().__new__(metacls, cls, bases, classdict, 
**kwds)
+classdict['_%s__in_progress' % cls] = False
+delattr(enum_class, '_%s__in_progress' % cls)
 except Exception as e:
 # since 3.12 the line "Error calling __set_name__ on 
'_proto_member' instance ..."
 # is tacked on to the error instead of raising a RuntimeError
@@ -1155,6 +1158,8 @@ def __new__(cls, value):
 # still not found -- verify that members exist, in-case somebody got 
here mistakenly
 # (such as via super when trying to override __new__)
 if not cls._member_map_:
+if getattr(cls, '_%s__in_progress' % cls.__name__, False):
+raise TypeError('do not use `super().__new__; call the 
appropriate __new__ directly') from None
 raise TypeError("%r has no members defined" % cls)
 #
 # still not found -- try _missing_ hook
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index cf3e042de1a4b4..27f8bbaf952afc 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -447,7 +447,7 @@ def spam(cls):
 def test_bad_new_super(self):
 with self.assertRaisesRegex(
 TypeError,
-'has no members defined',
+'do not use .super...__new__;',
 ):
 class BadSuper(self.enum_type):
 def __new__(cls, value):
@@ -3409,6 +3409,17 @@ def __new__(cls, int_value, *value_aliases):
 self.assertIs(Types(2), Types.NetList)
 self.assertIs(Types('nl'), Types.NetList)
 
+def test_no_members(self):
+with self.assertRaisesRegex(
+TypeError,
+'has no members',
+):
+Enum(7)
+with self.assertRaisesRegex(
+TypeError,
+'has no members',
+):
+Flag(7)
 
 class TestOrder(unittest.TestCase):
 "test usage of the `_order_` attribute"
diff --git 
a/Misc/NEWS.d/next/Library/2024-02-28-12-14-31.gh-issue-115821.YO2vKA.rst 
b/Misc/NEWS.d/next/Library/2024-02-28-12-14-31.gh-issue-115821.YO2vKA.rst
new file mode 100644
index 00..7512a09a37cd46
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-02-28-12-14-31.gh-issue-115821.YO2vKA.rst
@@ -0,0 +1,2 @@
+[Enum] Improve error message when calling super().__new__() in custom
+__new__.
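
A minimal sketch of the pattern the note and the new error message point to:
call the member type's __new__ directly instead of super().__new__() (the class
below is made up for illustration):

    from enum import Enum

    class Distance(int, Enum):
        def __new__(cls, value, unit):
            member = int.__new__(cls, value)   # not super().__new__()
            member._value_ = value
            member.unit = unit
            return member
        METER = 1, "m"
        KILOMETER = 1000, "km"

    Distance.METER.unit        # 'm'
    Distance.KILOMETER + 1     # 1001, the int mix-in still works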



[Python-checkins] [3.12] Update an out-of-date example in the itertools recipe intro (gh-116082) (gh-116084)

2024-02-28 Thread rhettinger
https://github.com/python/cpython/commit/d4a1c8e62817bff5cb8b86b5b387c36bcafa81da
commit: d4a1c8e62817bff5cb8b86b5b387c36bcafa81da
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: rhettinger 
date: 2024-02-28T17:19:17-06:00
summary:

[3.12] Update an out-of-date example in the itertools recipe intro (gh-116082) 
(gh-116084)

files:
M Doc/library/itertools.rst

diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 82d24a752a4aaa..f265e046afc85a 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -770,7 +770,7 @@ The primary purpose of the itertools recipes is 
educational.  The recipes show
 various ways of thinking about individual tools — for example, that
 ``chain.from_iterable`` is related to the concept of flattening.  The recipes
 also give ideas about ways that the tools can be combined — for example, how
-``compress()`` and ``range()`` can work together.  The recipes also show 
patterns
+``starmap()`` and ``repeat()`` can work together.  The recipes also show 
patterns
 for using itertools with the :mod:`operator` and :mod:`collections` modules as
 well as with the built-in itertools such as ``map()``, ``filter()``,
 ``reversed()``, and ``enumerate()``.



[Python-checkins] gh-115859: Fix test_type_inconsistency() when run multiple times (#116079)

2024-02-28 Thread gvanrossum
https://github.com/python/cpython/commit/479ac5ce8a311c9a5830b96e972478867fcbce61
commit: 479ac5ce8a311c9a5830b96e972478867fcbce61
branch: main
author: Guido van Rossum 
committer: gvanrossum 
date: 2024-02-28T23:56:58Z
summary:

gh-115859: Fix test_type_inconsistency() when run multiple times (#116079)

This should fix the refleaks bots.

(See https://github.com/python/cpython/pull/116062#issuecomment-1970038174 .)

files:
M Lib/test/test_capi/test_opt.py

diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index e1aef21b2c7644..a43726f05a448d 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -893,9 +893,13 @@ def testfunc(n):
 self.assertIn("_COMPARE_OP_STR", uops)
 
 def test_type_inconsistency(self):
-def testfunc(n):
-for i in range(n):
-x = _test_global + _test_global
+ns = {}
+exec(textwrap.dedent("""
+def testfunc(n):
+for i in range(n):
+x = _test_global + _test_global
+"""), globals(), ns)
+testfunc = ns['testfunc']
 # Must be a real global else it won't be optimized to 
_LOAD_CONST_INLINE
 global _test_global
 _test_global = 0



[Python-checkins] gh-115816: Generate calls to sym_new_const() etc. without _Py_uop prefix (#116077)

2024-02-28 Thread gvanrossum
https://github.com/python/cpython/commit/86e5e063aba76a7f4fc58f7d06b17b0a4730fd8e
commit: 86e5e063aba76a7f4fc58f7d06b17b0a4730fd8e
branch: main
author: Guido van Rossum 
committer: gvanrossum 
date: 2024-02-29T00:05:53Z
summary:

gh-115816: Generate calls to sym_new_const() etc. without _Py_uop prefix (#116077)

This was left behind by GH-115987. Basically a lot of diffs like this:
```
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
```

files:
M Lib/test/test_generated_cases.py
M Python/optimizer_cases.c.h
M Tools/cases_generator/optimizer_generator.py

diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 18bf8ab29148c4..6fcb5d58dd7f34 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -900,7 +900,7 @@ def test_overridden_abstract_args(self):
 
 case OP2: {
 _Py_UopsSymbol *out;
-out = _Py_uop_sym_new_unknown(ctx);
+out = sym_new_unknown(ctx);
 if (out == NULL) goto out_of_space;
 stack_pointer[-1] = out;
 break;
@@ -925,7 +925,7 @@ def test_no_overridden_case(self):
 output = """
 case OP: {
 _Py_UopsSymbol *out;
-out = _Py_uop_sym_new_unknown(ctx);
+out = sym_new_unknown(ctx);
 if (out == NULL) goto out_of_space;
 stack_pointer[-1] = out;
 break;
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 9b387c07850245..b38c03bed4b4d0 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -80,7 +80,7 @@
 
 case _END_SEND: {
 _Py_UopsSymbol *value;
-value = _Py_uop_sym_new_unknown(ctx);
+value = sym_new_unknown(ctx);
 if (value == NULL) goto out_of_space;
 stack_pointer[-2] = value;
 stack_pointer += -1;
@@ -89,7 +89,7 @@
 
 case _UNARY_NEGATIVE: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -97,7 +97,7 @@
 
 case _UNARY_NOT: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -105,7 +105,7 @@
 
 case _TO_BOOL: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -117,7 +117,7 @@
 
 case _TO_BOOL_INT: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -125,7 +125,7 @@
 
 case _TO_BOOL_LIST: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -133,7 +133,7 @@
 
 case _TO_BOOL_NONE: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -141,7 +141,7 @@
 
 case _TO_BOOL_STR: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -149,7 +149,7 @@
 
 case _TO_BOOL_ALWAYS_TRUE: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -157,7 +157,7 @@
 
 case _UNARY_INVERT: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-1] = res;
 break;
@@ -390,7 +390,7 @@
 
 case _BINARY_OP_ADD_UNICODE: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;
 stack_pointer[-2] = res;
 stack_pointer += -1;
@@ -399,7 +399,7 @@
 
 case _BINARY_SUBSCR: {
 _Py_UopsSymbol *res;
-res = _Py_uop_sym_new_unknown(ctx);
+res = sym_new_unknown(ctx);
 if (res == NULL) goto out_of_space;

[Python-checkins] [3.12] gh-114572: Fix locking in cert_store_stats and get_ca_certs (GH-114573) (GH-115547)

2024-02-28 Thread encukou
https://github.com/python/cpython/commit/542f3272f56f31ed04e74c40635a913fbc12d286
commit: 542f3272f56f31ed04e74c40635a913fbc12d286
branch: 3.12
author: Miss Islington (bot) <31488909+miss-isling...@users.noreply.github.com>
committer: encukou 
date: 2024-02-29T08:53:56+01:00
summary:

[3.12] gh-114572: Fix locking in cert_store_stats and get_ca_certs (GH-114573) (GH-115547)

gh-114572: Fix locking in cert_store_stats and get_ca_certs (GH-114573)

* gh-114572: Fix locking in cert_store_stats and get_ca_certs

cert_store_stats and get_ca_certs query the SSLContext's X509_STORE with
X509_STORE_get0_objects, but reading the result requires a lock. See
https://github.com/openssl/openssl/pull/23224 for details.

Instead, use X509_STORE_get1_objects, newly added in that PR.
X509_STORE_get1_objects does not exist in current OpenSSLs, but we can
polyfill it with X509_STORE_lock and X509_STORE_unlock.

* Work around const-correctness problem

* Add missing X509_STORE_get1_objects failure check

* Add blurb
(cherry picked from commit bce693111bff906ccf9281c22371331aaff766ab)

Co-authored-by: David Benjamin 
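
As a usage-level sketch of the scenario this locking fix addresses (an assumed example, not part of the patch): a single ssl.SSLContext shared across threads while each thread queries the certificate store:

```
import ssl
import threading

ctx = ssl.create_default_context()  # one context shared by all threads

def query_store():
    # Both calls walk the context's X509_STORE; with this fix the C code
    # copies the store's objects under its lock instead of reading unlocked.
    ctx.cert_store_stats()
    ctx.get_ca_certs()

threads = [threading.Thread(target=query_store) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```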

files:
A Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst
M Modules/_ssl.c

diff --git a/Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst b/Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst
new file mode 100644
index 00000000000000..b4f9fe64db0615
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst
@@ -0,0 +1,4 @@
+:meth:`ssl.SSLContext.cert_store_stats` and
+:meth:`ssl.SSLContext.get_ca_certs` now correctly lock access to the
+certificate store, when the :class:`ssl.SSLContext` is shared across
+multiple threads.
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index b602eb04c795a9..5f1425ae464059 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -4520,6 +4520,50 @@ set_sni_callback(PySSLContext *self, PyObject *arg, void *c)
 return 0;
 }
 
+#if OPENSSL_VERSION_NUMBER < 0x30300000L
+static X509_OBJECT *x509_object_dup(const X509_OBJECT *obj)
+{
+int ok;
+X509_OBJECT *ret = X509_OBJECT_new();
+if (ret == NULL) {
+return NULL;
+}
+switch (X509_OBJECT_get_type(obj)) {
+case X509_LU_X509:
+ok = X509_OBJECT_set1_X509(ret, X509_OBJECT_get0_X509(obj));
+break;
+case X509_LU_CRL:
+/* X509_OBJECT_get0_X509_CRL was not const-correct prior to 3.0.*/
+ok = X509_OBJECT_set1_X509_CRL(
+ret, X509_OBJECT_get0_X509_CRL((X509_OBJECT *)obj));
+break;
+default:
+/* We cannot duplicate unrecognized types in a polyfill, but it is
+ * safe to leave an empty object. The caller will ignore it. */
+ok = 1;
+break;
+}
+if (!ok) {
+X509_OBJECT_free(ret);
+return NULL;
+}
+return ret;
+}
+
+static STACK_OF(X509_OBJECT) *
+X509_STORE_get1_objects(X509_STORE *store)
+{
+STACK_OF(X509_OBJECT) *ret;
+if (!X509_STORE_lock(store)) {
+return NULL;
+}
+ret = sk_X509_OBJECT_deep_copy(X509_STORE_get0_objects(store),
+   x509_object_dup, X509_OBJECT_free);
+X509_STORE_unlock(store);
+return ret;
+}
+#endif
+
 PyDoc_STRVAR(PySSLContext_sni_callback_doc,
 "Set a callback that will be called when a server name is provided by the 
SSL/TLS client in the SNI extension.\n\
 \n\
@@ -4549,7 +4593,12 @@ _ssl__SSLContext_cert_store_stats_impl(PySSLContext *self)
 int x509 = 0, crl = 0, ca = 0, i;
 
 store = SSL_CTX_get_cert_store(self->ctx);
-objs = X509_STORE_get0_objects(store);
+objs = X509_STORE_get1_objects(store);
+if (objs == NULL) {
+PyErr_SetString(PyExc_MemoryError, "failed to query cert store");
+return NULL;
+}
+
 for (i = 0; i < sk_X509_OBJECT_num(objs); i++) {
 obj = sk_X509_OBJECT_value(objs, i);
 switch (X509_OBJECT_get_type(obj)) {
@@ -4563,12 +4612,11 @@ _ssl__SSLContext_cert_store_stats_impl(PySSLContext *self)
 crl++;
 break;
 default:
-/* Ignore X509_LU_FAIL, X509_LU_RETRY, X509_LU_PKEY.
- * As far as I can tell they are internal states and never
- * stored in a cert store */
+/* Ignore unrecognized types. */
 break;
 }
 }
+sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free);
 return Py_BuildValue("{sisisi}", "x509", x509, "crl", crl,
 "x509_ca", ca);
 }
@@ -4600,7 +4648,12 @@ _ssl__SSLContext_get_ca_certs_impl(PySSLContext *self, int binary_form)
 }
 
 store = SSL_CTX_get_cert_store(self->ctx);
-objs = X509_STORE_get0_objects(store);
+objs = X509_STORE_get1_objects(store);
+if (objs == NULL) {
+PyErr_SetString(PyExc_MemoryError, "failed to query cert store");
+goto error;
+}
+
 for (i = 0; i < sk_X509_OBJECT_num(objs); i++) {