diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 6f8fe005621c88..98ab4008bed7cf 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -6,7 +6,7 @@ ENV WASI_SDK_VERSION=21 ENV WASI_SDK_PATH=/opt/wasi-sdk ENV WASMTIME_HOME=/opt/wasmtime -ENV WASMTIME_VERSION=18.0.3 +ENV WASMTIME_VERSION=22.0.0 ENV WASMTIME_CPU_ARCH=x86_64 RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \ diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e8f4a4693a814c..95e30ac3001c9c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -72,6 +72,7 @@ Include/internal/pycore_freelist.h @ericsnowcurrently Include/internal/pycore_global_objects.h @ericsnowcurrently Include/internal/pycore_obmalloc.h @ericsnowcurrently Include/internal/pycore_pymem.h @ericsnowcurrently +Include/internal/pycore_stackref.h @Fidget-Spinner Modules/main.c @ericsnowcurrently Programs/_bootstrap_python.c @ericsnowcurrently Programs/python.c @ericsnowcurrently diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml index c389fe9e173b38..db6c04ec2ac1c5 100644 --- a/.github/workflows/reusable-wasi.yml +++ b/.github/workflows/reusable-wasi.yml @@ -11,7 +11,7 @@ jobs: timeout-minutes: 60 runs-on: ubuntu-22.04 env: - WASMTIME_VERSION: 18.0.3 + WASMTIME_VERSION: 22.0.0 WASI_SDK_VERSION: 21 WASI_SDK_PATH: /opt/wasi-sdk CROSS_BUILD_PYTHON: cross-build/build @@ -20,9 +20,9 @@ jobs: - uses: actions/checkout@v4 # No problem resolver registered as one doesn't currently exist for Clang. - name: "Install wasmtime" - uses: jcbhmr/setup-wasmtime@v2 + uses: bytecodealliance/actions/wasmtime/setup@v1 with: - wasmtime-version: ${{ env.WASMTIME_VERSION }} + version: ${{ env.WASMTIME_VERSION }} - name: "Restore WASI SDK" id: cache-wasi-sdk uses: actions/cache@v4 @@ -50,8 +50,10 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python. + # Include the hash of `Tools/wasm/wasi.py` as it may change the environment variables. + # (Make sure to keep the key in sync with the other config.cache step below.) + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure build Python" run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug - name: "Make build Python" @@ -60,8 +62,8 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_WASI }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Should be kept in sync with the other config.cache step above. 
+ key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure host" # `--with-pydebug` inferred from configure-build-python run: python3 Tools/wasm/wasi.py configure-host -- --config-cache diff --git a/Doc/c-api/cell.rst b/Doc/c-api/cell.rst index f8cd0344fdd1c0..61eb994c370946 100644 --- a/Doc/c-api/cell.rst +++ b/Doc/c-api/cell.rst @@ -39,7 +39,8 @@ Cell objects are not likely to be useful elsewhere. .. c:function:: PyObject* PyCell_Get(PyObject *cell) - Return the contents of the cell *cell*. + Return the contents of the cell *cell*, which can be ``NULL``. + If *cell* is not a cell object, returns ``NULL`` with an exception set. .. c:function:: PyObject* PyCell_GET(PyObject *cell) @@ -52,8 +53,10 @@ Cell objects are not likely to be useful elsewhere. Set the contents of the cell object *cell* to *value*. This releases the reference to any current content of the cell. *value* may be ``NULL``. *cell* - must be non-``NULL``; if it is not a cell object, ``-1`` will be returned. On - success, ``0`` will be returned. + must be non-``NULL``. + + On success, return ``0``. + If *cell* is not a cell object, set an exception and return ``-1``. .. c:function:: void PyCell_SET(PyObject *cell, PyObject *value) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index 63e3bed6727987..ce9d5a0f758b29 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -43,6 +43,8 @@ Module Objects to ``None``); the caller is responsible for providing a :attr:`__file__` attribute. + Return ``NULL`` with an exception set on error. + .. versionadded:: 3.3 .. versionchanged:: 3.4 @@ -265,6 +267,8 @@ of the following two module creation functions: API version *module_api_version*. If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_Create` @@ -461,6 +465,8 @@ objects dynamically. Note that both ``PyModule_FromDefAndSpec`` and If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_FromDefAndSpec` @@ -601,15 +607,16 @@ state: .. c:function:: int PyModule_AddIntConstant(PyObject *module, const char *name, long value) Add an integer constant to *module* as *name*. This convenience function can be - used from the module's initialization function. Return ``-1`` on error, ``0`` on - success. + used from the module's initialization function. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:function:: int PyModule_AddStringConstant(PyObject *module, const char *name, const char *value) Add a string constant to *module* as *name*. This convenience function can be used from the module's initialization function. The string *value* must be - ``NULL``-terminated. Return ``-1`` on error, ``0`` on success. + ``NULL``-terminated. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:macro:: PyModule_AddIntMacro(module, macro) @@ -617,7 +624,7 @@ state: Add an int constant to *module*. The name and the value are taken from *macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int constant *AF_INET* with the value of *AF_INET* to *module*. 
- Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:macro:: PyModule_AddStringMacro(module, macro) @@ -630,7 +637,7 @@ state: The type object is finalized by calling internally :c:func:`PyType_Ready`. The name of the type object is taken from the last component of :c:member:`~PyTypeObject.tp_name` after dot. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.9 @@ -643,7 +650,7 @@ state: import machinery assumes the module does not support running without the GIL. This function is only available in Python builds configured with :option:`--disable-gil`. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.13 @@ -682,14 +689,14 @@ since multiple such modules can be created from a single definition. The caller must hold the GIL. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.3 .. c:function:: int PyState_RemoveModule(PyModuleDef *def) Removes the module object created from *def* from the interpreter state. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. The caller must hold the GIL. diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 8eeac3fc8a1e58..2103a64d8ffbb7 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -52,6 +52,7 @@ Object Protocol The reference is borrowed from the interpreter, and is valid until the interpreter finalization. + .. versionadded:: 3.13 diff --git a/Doc/c-api/slice.rst b/Doc/c-api/slice.rst index 27a1757c745d8b..8adf6a961378a3 100644 --- a/Doc/c-api/slice.rst +++ b/Doc/c-api/slice.rst @@ -23,7 +23,9 @@ Slice Objects Return a new slice object with the given values. The *start*, *stop*, and *step* parameters are used as the values of the slice object attributes of the same names. Any of the values may be ``NULL``, in which case the - ``None`` will be used for the corresponding attribute. Return ``NULL`` if + ``None`` will be used for the corresponding attribute. + + Return ``NULL`` with an exception set if the new object could not be allocated. @@ -52,7 +54,7 @@ Slice Objects of bounds indices are clipped in a manner consistent with the handling of normal slices. - Returns ``0`` on success and ``-1`` on error with exception set. + Return ``0`` on success and ``-1`` on error with an exception set. .. note:: This function is considered not safe for resizable sequences. @@ -95,7 +97,7 @@ Slice Objects ``PY_SSIZE_T_MIN`` to ``PY_SSIZE_T_MIN``, and silently boost the step values less than ``-PY_SSIZE_T_MAX`` to ``-PY_SSIZE_T_MAX``. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.6.1 diff --git a/Doc/conf.py b/Doc/conf.py index 8a14646801ebac..29b1b2db32718b 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -272,6 +272,9 @@ ('c:data', 'PyExc_UnicodeWarning'), ('c:data', 'PyExc_UserWarning'), ('c:data', 'PyExc_Warning'), + # Undocumented public C macros + ('c:macro', 'Py_BUILD_ASSERT'), + ('c:macro', 'Py_BUILD_ASSERT_EXPR'), # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot # be resolved, as the method is currently undocumented. For context, see # https://github.com/python/cpython/pull/103289. 
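The Doc/c-api/module.rst hunks above consistently document the convention that the ``PyModule_Add*`` helpers return ``0`` on success and ``-1`` with an exception set on error. A minimal sketch of the resulting error-handling pattern (the module name ``spam``, the function name, and the constant values are hypothetical illustrations, not part of this patch)::

    #include <Python.h>

    static PyObject *
    create_spam_module(void)
    {
        PyObject *module = PyModule_New("spam");
        if (module == NULL) {
            return NULL;                    /* exception already set */
        }
        /* Both helpers return 0 on success and -1 with an exception set. */
        if (PyModule_AddIntConstant(module, "MAX_RETRIES", 3) < 0 ||
            PyModule_AddStringConstant(module, "VERSION", "1.0") < 0)
        {
            Py_DECREF(module);
            return NULL;                    /* propagate the already-set exception */
        }
        return module;
    }
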
diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 6232e173d9537d..647ff9da04d10d 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -251,9 +251,9 @@ attribute will include the package's path if imported:: >>> asyncio.__main__.__name__ 'asyncio.__main__' -This won't work for ``__main__.py`` files in the root directory of a .zip file -though. Hence, for consistency, minimal ``__main__.py`` like the :mod:`venv` -one mentioned below are preferred. +This won't work for ``__main__.py`` files in the root directory of a +``.zip`` file though. Hence, for consistency, a minimal ``__main__.py`` +without a ``__name__`` check is preferred. .. seealso:: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 1d79f78e8e1b67..70bdd154d6c406 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1262,6 +1262,9 @@ Executing code in thread or process pools The *executor* argument should be an :class:`concurrent.futures.Executor` instance. The default executor is used if *executor* is ``None``. + The default executor can be set by :meth:`loop.set_default_executor`, + otherwise, a :class:`concurrent.futures.ThreadPoolExecutor` will be + lazy-initialized and used by :func:`run_in_executor` if needed. Example:: diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 1d82f92ea67857..17348dd907bf67 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1934,6 +1934,10 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.12 Summation of floats switched to an algorithm that gives higher accuracy and better commutativity on most builds. + .. versionchanged:: 3.14 + Added specialization for summation of complexes, + using same algorithm as for summation of floats. + .. class:: super() super(type, object_or_type=None) diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index b582321515db56..52487b4737ae2f 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -389,7 +389,7 @@ the :mod:`glob` module.) that contains symbolic links. On Windows, it converts forward slashes to backward slashes. To normalize case, use :func:`normcase`. - .. note:: + .. note:: On POSIX systems, in accordance with `IEEE Std 1003.1 2013 Edition; 4.13 Pathname Resolution `_, if a pathname begins with exactly two slashes, the first component diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index d7fd56f4c4ff7f..f139abd2454d69 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1539,7 +1539,7 @@ Creating files and directories Copying, renaming and deleting ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. method:: Path.copy(target, *, follow_symlinks=True) +.. method:: Path.copy(target, *, follow_symlinks=True, preserve_metadata=False) Copy the contents of this file to the *target* file. If *target* specifies a file that already exists, it will be replaced. @@ -1548,11 +1548,11 @@ Copying, renaming and deleting will be created as a symbolic link. If *follow_symlinks* is true and this file is a symbolic link, *target* will be a copy of the symlink target. - .. note:: - This method uses operating system functionality to copy file content - efficiently. The OS might also copy some metadata, such as file - permissions. After the copy is complete, users may wish to call - :meth:`Path.chmod` to set the permissions of the target file. 
+ If *preserve_metadata* is false (the default), only the file data is + guaranteed to be copied. Set *preserve_metadata* to true to ensure that the + file mode (permissions), flags, last access and modification times, and + extended attributes are copied where supported. This argument has no effect + on Windows, where metadata is always preserved when copying. .. versionadded:: 3.14 diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst index 9721da7220d54d..d7940b3040bbdb 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -699,7 +699,7 @@ you are using :class:`profile.Profile` or :class:`cProfile.Profile`, As the :class:`cProfile.Profile` class cannot be calibrated, custom timer functions should be used with care and should be as fast as possible. For the best results with a custom timer, it might be necessary to hard-code it - in the C source of the internal :mod:`_lsprof` module. + in the C source of the internal :mod:`!_lsprof` module. Python 3.3 adds several new functions in :mod:`time` that can be used to make precise measurements of process or wall-clock time. For example, see diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 54cc7d1333d34e..d3f7cfb01d3c21 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2095,8 +2095,9 @@ expression support in the :mod:`re` module). If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty strings (for example, ``'1,,2'.split(',')`` returns ``['1', '', '2']``). The *sep* argument may consist of multiple characters - (for example, ``'1<>2<>3'.split('<>')`` returns ``['1', '2', '3']``). - Splitting an empty string with a specified separator returns ``['']``. + as a single delimiter (to split with multiple delimiters, use + :func:`re.split`). Splitting an empty string with a specified separator + returns ``['']``. For example:: @@ -2106,6 +2107,8 @@ expression support in the :mod:`re` module). ['1', '2,3'] >>> '1,2,,3,'.split(',') ['1', '2', '', '3', ''] + >>> '1<>2<>3<4'.split('<>') + ['1', '2', '3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive whitespace are regarded as a single separator, @@ -3149,10 +3152,9 @@ produce new objects. If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty subsequences (for example, ``b'1,,2'.split(b',')`` returns ``[b'1', b'', b'2']``). The *sep* argument may consist of a - multibyte sequence (for example, ``b'1<>2<>3'.split(b'<>')`` returns - ``[b'1', b'2', b'3']``). Splitting an empty sequence with a specified - separator returns ``[b'']`` or ``[bytearray(b'')]`` depending on the type - of object being split. The *sep* argument may be any + multibyte sequence as a single delimiter. Splitting an empty sequence with + a specified separator returns ``[b'']`` or ``[bytearray(b'')]`` depending + on the type of object being split. The *sep* argument may be any :term:`bytes-like object`. For example:: @@ -3163,6 +3165,8 @@ produce new objects. 
[b'1', b'2,3'] >>> b'1,2,,3,'.split(b',') [b'1', b'2', b'', b'3', b''] + >>> b'1<>2<>3<4'.split(b'<>') + [b'1', b'2', b'3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive ASCII whitespace are regarded as a single diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 4f6c0c63ae42be..618664b23f0680 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -293,7 +293,7 @@ statements, cannot be an unpacking) and the expression list, performs the binary operation specific to the type of assignment on the two operands, and assigns the result to the original target. The target is only evaluated once. -An augmented assignment expression like ``x += 1`` can be rewritten as ``x = x + +An augmented assignment statement like ``x += 1`` can be rewritten as ``x = x + 1`` to achieve a similar, but not exactly equal effect. In the augmented version, ``x`` is only evaluated once. Also, when possible, the actual operation is performed *in-place*, meaning that rather than creating a new object and diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt index 4e49ba1a8ededd..068fe0cb426ecd 100644 --- a/Doc/requirements-oldest-sphinx.txt +++ b/Doc/requirements-oldest-sphinx.txt @@ -14,7 +14,7 @@ python-docs-theme>=2022.1 alabaster==0.7.16 Babel==2.15.0 -certifi==2024.6.2 +certifi==2024.7.4 charset-normalizer==3.3.2 docutils==0.19 idna==3.7 diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py index c50b00636c36ce..67623b83d3a67d 100644 --- a/Doc/tools/check-warnings.py +++ b/Doc/tools/check-warnings.py @@ -14,7 +14,7 @@ from typing import TextIO # Fail if NEWS nit found before this line number -NEWS_NIT_THRESHOLD = 200 +NEWS_NIT_THRESHOLD = 1700 # Exclude these whether they're dirty or clean, # because they trigger a rebuild of dirty files. diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 2a1f06e2d286ff..2c73c224e4e8a1 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -427,7 +427,7 @@ Options for third-party dependencies .. option:: PANEL_CFLAGS .. option:: PANEL_LIBS - C compiler and Linker flags for PANEL, overriding ``pkg-config``. + C compiler and linker flags for PANEL, overriding ``pkg-config``. C compiler and linker flags for ``libpanel`` or ``libpanelw``, used by :mod:`curses.panel` module, overriding ``pkg-config``. @@ -615,7 +615,7 @@ also be used to improve performance. .. option:: --without-mimalloc - Disable the fast mimalloc allocator :ref:`mimalloc ` + Disable the fast :ref:`mimalloc ` allocator (enabled by default). See also :envvar:`PYTHONMALLOC` environment variable. diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 9578ba0c9c9657..da9b45cd8e58b3 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -89,8 +89,12 @@ Improved Modules ast --- -Added :func:`ast.compare` for comparing two ASTs. -(Contributed by Batuhan Taskaya and Jeremy Hylton in :issue:`15987`.) +* Added :func:`ast.compare` for comparing two ASTs. + (Contributed by Batuhan Taskaya and Jeremy Hylton in :issue:`15987`.) + +* Add support for :func:`copy.replace` for AST nodes. + + (Contributed by Bénédikt Tran in :gh:`121141`.) os -- @@ -110,6 +114,16 @@ pathlib another. (Contributed by Barney Gale in :gh:`73991`.) 
+pdb +--- + +* Hard-coded breakpoints (:func:`breakpoint` and :func:`pdb.set_trace()`) now + reuse the most recent :class:`~pdb.Pdb` instance that calls + :meth:`~pdb.Pdb.set_trace()`, instead of creating a new one each time. + As a result, all the instance specific data like :pdbcmd:`display` and + :pdbcmd:`commands` are preserved across hard-coded breakpoints. + (Contributed by Tian Gao in :gh:`121450`.) + symtable -------- diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 8aef0f5ac26728..938dd273e7e102 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1495,7 +1495,7 @@ The dictionary returned by :meth:`.SSLSocket.getpeercert` contains additional stat ---- -The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C +The :mod:`stat` module is now backed by a C implementation in :mod:`!_stat`. A C implementation is required as most of the values aren't standardized and are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index cd8a903327cc2f..75654f3e78eb16 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -1935,8 +1935,8 @@ specifying the namespace in which the code will be running. tkinter ------- -The :mod:`tkinter._fix` module used for setting up the Tcl/Tk environment -on Windows has been replaced by a private function in the :mod:`_tkinter` +The :mod:`!tkinter._fix` module used for setting up the Tcl/Tk environment +on Windows has been replaced by a private function in the :mod:`!_tkinter` module which makes no permanent changes to environment variables. (Contributed by Zachary Ware in :issue:`20035`.) diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index 69d043bcf7efd5..ae750cb9bba696 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -2048,7 +2048,7 @@ The :mod:`macpath` is now deprecated and will be removed in Python 3.8. threading --------- -:mod:`dummy_threading` and :mod:`_dummy_thread` have been deprecated. It is +:mod:`!dummy_threading` and :mod:`!_dummy_thread` have been deprecated. It is no longer possible to build Python with threading disabled. Use :mod:`threading` instead. (Contributed by Antoine Pitrou in :issue:`31370`.) @@ -2184,7 +2184,7 @@ The following features and APIs have been removed from Python 3.7: ``socket.socketpair`` on Python 3.5 and newer. * :mod:`asyncio` no longer exports the :mod:`selectors` and - :mod:`_overlapped` modules as ``asyncio.selectors`` and + :mod:`!_overlapped` modules as ``asyncio.selectors`` and ``asyncio._overlapped``. Replace ``from asyncio import selectors`` with ``import selectors``. diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h new file mode 100644 index 00000000000000..d3b88f58c82ca3 --- /dev/null +++ b/Include/cpython/modsupport.h @@ -0,0 +1,26 @@ +#ifndef Py_CPYTHON_MODSUPPORT_H +# error "this header file must not be included directly" +#endif + +// A data structure that can be used to run initialization code once in a +// thread-safe manner. The C++11 equivalent is std::call_once. +typedef struct { + uint8_t v; +} _PyOnceFlag; + +typedef struct _PyArg_Parser { + const char *format; + const char * const *keywords; + const char *fname; + const char *custom_msg; + _PyOnceFlag once; /* atomic one-time initialization flag */ + int is_kwtuple_owned; /* does this parser own the kwtuple object? 
*/ + int pos; /* number of positional-only arguments */ + int min; /* minimal number of arguments */ + int max; /* maximal number of positional arguments */ + PyObject *kwtuple; /* tuple of keyword parameter names */ + struct _PyArg_Parser *next; +} _PyArg_Parser; + +PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, + struct _PyArg_Parser *, ...); diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 55a139bb9158db..4ecef4f56edf42 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -510,6 +510,9 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj); // See https://en.cppreference.com/w/cpp/atomic/atomic_thread_fence static inline void _Py_atomic_fence_seq_cst(void); +// Acquire fence +static inline void _Py_atomic_fence_acquire(void); + // Release fence static inline void _Py_atomic_fence_release(void); diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index f2ebdeeb5524e4..ef09954d53ac1d 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -542,6 +542,10 @@ static inline void _Py_atomic_fence_seq_cst(void) { __atomic_thread_fence(__ATOMIC_SEQ_CST); } + static inline void +_Py_atomic_fence_acquire(void) +{ __atomic_thread_fence(__ATOMIC_ACQUIRE); } + static inline void _Py_atomic_fence_release(void) { __atomic_thread_fence(__ATOMIC_RELEASE); } diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index f32995c1f578ac..84da21bdcbff4f 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -1066,6 +1066,18 @@ _Py_atomic_fence_seq_cst(void) #else # error "no implementation of _Py_atomic_fence_seq_cst" #endif +} + + static inline void +_Py_atomic_fence_acquire(void) +{ +#if defined(_M_ARM64) + __dmb(_ARM64_BARRIER_ISHLD); +#elif defined(_M_X64) || defined(_M_IX86) + _ReadBarrier(); +#else +# error "no implementation of _Py_atomic_fence_acquire" +#endif } static inline void diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index 0cdce4e6dd39f0..7c71e94c68f8e6 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -961,6 +961,13 @@ _Py_atomic_fence_seq_cst(void) atomic_thread_fence(memory_order_seq_cst); } + static inline void +_Py_atomic_fence_acquire(void) +{ + _Py_USING_STD; + atomic_thread_fence(memory_order_acquire); +} + static inline void _Py_atomic_fence_release(void) { diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index 42b4b03b10ca20..b36b4681f5dddb 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -100,7 +100,7 @@ PyAPI_FUNC(PyObject*) PyUnstable_Exc_PrepReraiseStar( /* In signalmodule.c */ -int PySignal_SetWakeupFd(int fd); +PyAPI_FUNC(int) PySignal_SetWakeupFd(int fd); /* Support for adding program text to SyntaxErrors */ diff --git a/Include/internal/mimalloc/mimalloc/atomic.h b/Include/internal/mimalloc/mimalloc/atomic.h index 52f82487685cdb..cdd9c372beafd5 100644 --- a/Include/internal/mimalloc/mimalloc/atomic.h +++ b/Include/internal/mimalloc/mimalloc/atomic.h @@ -23,7 +23,9 @@ terms of the MIT license. 
A copy of the license can be found in the file #define _Atomic(tp) std::atomic #define mi_atomic(name) std::atomic_##name #define mi_memory_order(name) std::memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__cplusplus >= 202002L) // c++20, see issue #571 +#if (__cplusplus >= 202002L) // c++20, see issue #571 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) @@ -39,7 +41,9 @@ terms of the MIT license. A copy of the license can be found in the file #include #define mi_atomic(name) atomic_##name #define mi_memory_order(name) memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__STDC_VERSION__ >= 201710L) // c17, see issue #735 +#if (__STDC_VERSION__ >= 201710L) // c17, see issue #735 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index a1ac034e3e44af..325243e6a64e1f 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -76,15 +76,6 @@ int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj); // Export for '_opcode' extension module -PyAPI_FUNC(int) _PyCompile_OpcodeIsValid(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasArg(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasConst(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasName(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasJump(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasFree(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasLocal(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasExc(int opcode); - PyAPI_FUNC(PyObject*) _PyCompile_GetUnaryIntrinsicName(int index); PyAPI_FUNC(PyObject*) _PyCompile_GetBinaryIntrinsicName(int index); diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 9e0e1237915e82..56cc49432cc61e 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -322,6 +322,8 @@ _PyInlineValuesSize(PyTypeObject *tp) int _PyDict_DetachFromObject(PyDictObject *dict, PyObject *obj); +PyDictObject *_PyObject_MaterializeManagedDict_LockHeld(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 1e0368faa5b510..506c20ca1950bd 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -13,7 +13,7 @@ extern "C" { #include "pycore_code.h" // STATS #include "pycore_stackref.h" // _PyStackRef -/* See Objects/frame_layout.md for an explanation of the frame stack +/* See InternalDocs/frames.md for an explanation of the frame stack * including explanation of the PyFrameObject and _PyInterpreterFrame * structs. */ diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h index 8aa73946e2c645..e6da083b807ce5 100644 --- a/Include/internal/pycore_lock.h +++ b/Include/internal/pycore_lock.h @@ -128,12 +128,6 @@ _PyRawMutex_Unlock(_PyRawMutex *m) _PyRawMutex_UnlockSlow(m); } -// A data structure that can be used to run initialization code once in a -// thread-safe manner. The C++11 equivalent is std::call_once. -typedef struct { - uint8_t v; -} _PyOnceFlag; - // Type signature for one-time initialization functions. The function should // return 0 on success and -1 on failure. 
typedef int _Py_once_fn_t(void *arg); @@ -234,12 +228,12 @@ PyAPI_FUNC(void) _PySeqLock_AbandonWrite(_PySeqLock *seqlock); PyAPI_FUNC(uint32_t) _PySeqLock_BeginRead(_PySeqLock *seqlock); // End the read operation and confirm that the sequence number has not changed. -// Returns 1 if the read was successful or 0 if the read should be re-tried. -PyAPI_FUNC(uint32_t) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); +// Returns 1 if the read was successful or 0 if the read should be retried. +PyAPI_FUNC(int) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); // Check if the lock was held during a fork and clear the lock. Returns 1 -// if the lock was held and any associated datat should be cleared. -PyAPI_FUNC(uint32_t) _PySeqLock_AfterFork(_PySeqLock *seqlock); +// if the lock was held and any associated data should be cleared. +PyAPI_FUNC(int) _PySeqLock_AfterFork(_PySeqLock *seqlock); #ifdef __cplusplus } diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h index 3d3cd6722528e9..11fde814875938 100644 --- a/Include/internal/pycore_modsupport.h +++ b/Include/internal/pycore_modsupport.h @@ -67,24 +67,6 @@ PyAPI_FUNC(void) _PyArg_BadArgument( // --- _PyArg_Parser API --------------------------------------------------- -typedef struct _PyArg_Parser { - const char *format; - const char * const *keywords; - const char *fname; - const char *custom_msg; - _PyOnceFlag once; /* atomic one-time initialization flag */ - int is_kwtuple_owned; /* does this parser own the kwtuple object? */ - int pos; /* number of positional-only arguments */ - int min; /* minimal number of arguments */ - int max; /* maximal number of positional arguments */ - PyObject *kwtuple; /* tuple of keyword parameter names */ - struct _PyArg_Parser *next; -} _PyArg_Parser; - -// Export for '_testclinic' shared extension -PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, - struct _PyArg_Parser *, ...); - // Export for '_dbm' shared extension PyAPI_FUNC(int) _PyArg_ParseStackAndKeywords( PyObject *const *args, diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index bc67377a89c17f..d4ffd977940a02 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -98,13 +98,6 @@ typedef struct _Py_DebugOffsets { uint64_t owner; } interpreter_frame; - // CFrame offset; - struct _cframe { - uint64_t size; - uint64_t current_frame; - uint64_t previous; - } cframe; - // Code object offset; struct _code_object { uint64_t size; diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index 4cfdf92459c70a..d9ed16a3d2321f 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -127,12 +127,7 @@ typedef struct _symtable_entry { unsigned ste_can_see_class_scope : 1; /* true if this block can see names bound in an enclosing class scope */ int ste_comp_iter_expr; /* non-zero if visiting a comprehension range expression */ - int ste_lineno; /* first line of block */ - int ste_col_offset; /* offset of first line of block */ - int ste_end_lineno; /* end line of block */ - int ste_end_col_offset; /* end offset of first line of block */ - int ste_opt_lineno; /* lineno of last exec or import * */ - int ste_opt_col_offset; /* offset of last exec or import * */ + _Py_SourceLocation ste_loc; /* source location of block */ struct _symtable_entry *ste_annotation_block; /* symbol table entry for this entry's annotations */ struct symtable *ste_table; } 
PySTEntryObject; diff --git a/Include/modsupport.h b/Include/modsupport.h index ea4c0fce9f4562..af995f567b004c 100644 --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -134,6 +134,12 @@ PyAPI_FUNC(PyObject *) PyModule_FromDefAndSpec2(PyModuleDef *def, #endif /* New in 3.5 */ +#ifndef Py_LIMITED_API +# define Py_CPYTHON_MODSUPPORT_H +# include "cpython/modsupport.h" +# undef Py_CPYTHON_MODSUPPORT_H +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h index a1e5b33b0fdaae..abfdb6ce24df21 100644 --- a/Include/object.h +++ b/Include/object.h @@ -249,11 +249,7 @@ PyAPI_FUNC(PyTypeObject*) Py_TYPE(PyObject *ob); #else static inline PyTypeObject* _Py_TYPE(PyObject *ob) { - #if defined(Py_GIL_DISABLED) - return (PyTypeObject *)_Py_atomic_load_ptr_relaxed(&ob->ob_type); - #else return ob->ob_type; - #endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_TYPE(ob) _Py_TYPE(_PyObject_CAST(ob)) @@ -284,11 +280,7 @@ static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { static inline void Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { -#ifdef Py_GIL_DISABLED - _Py_atomic_store_ptr(&ob->ob_type, type); -#else ob->ob_type = type; -#endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_SET_TYPE(ob, type) Py_SET_TYPE(_PyObject_CAST(ob), type) diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md index 17fe0df6e1db10..b3dc0a48069969 100644 --- a/InternalDocs/compiler.md +++ b/InternalDocs/compiler.md @@ -623,8 +623,8 @@ Important files Objects ======= -* [Objects/locations.md](https://github.com/python/cpython/blob/main/Objects/locations.md): Describes the location table -* [Objects/frame_layout.md](https://github.com/python/cpython/blob/main/Objects/frame_layout.md): Describes the frame stack +* [Locations](locations.md): Describes the location table +* [Frames](frames.md): Describes frames and the frame stack * [Objects/object_layout.md](https://github.com/python/cpython/blob/main/Objects/object_layout.md): Describes object layout for 3.11 and later * [Exception Handling](exception_handling.md): Describes the exception table diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 7d298e1674b49a..75b5ad1b1a47d2 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -1577,6 +1577,7 @@ def __init__(self, file, mode='r', closefd=True, opener=None): self._blksize = getattr(fdfstat, 'st_blksize', 0) if self._blksize <= 1: self._blksize = DEFAULT_BUFFER_SIZE + self._estimated_size = fdfstat.st_size if _setmode: # don't translate newlines (\r\n <=> \n) @@ -1654,14 +1655,18 @@ def readall(self): """ self._checkClosed() self._checkReadable() - bufsize = DEFAULT_BUFFER_SIZE - try: - pos = os.lseek(self._fd, 0, SEEK_CUR) - end = os.fstat(self._fd).st_size - if end >= pos: - bufsize = end - pos + 1 - except OSError: - pass + if self._estimated_size <= 0: + bufsize = DEFAULT_BUFFER_SIZE + else: + bufsize = self._estimated_size + 1 + + if self._estimated_size > 65536: + try: + pos = os.lseek(self._fd, 0, SEEK_CUR) + if self._estimated_size >= pos: + bufsize = self._estimated_size - pos + 1 + except OSError: + pass result = bytearray() while True: @@ -1737,6 +1742,7 @@ def truncate(self, size=None): if size is None: size = self.tell() os.ftruncate(self._fd, size) + self._estimated_size = size return size def close(self): diff --git a/Lib/asyncio/__main__.py b/Lib/asyncio/__main__.py index 91fff9aaee337b..3e2fe93943d4ed 100644 --- a/Lib/asyncio/__main__.py +++ b/Lib/asyncio/__main__.py @@ -116,7 +116,7 @@ def run(self): 
if err := check(): raise RuntimeError(err) except Exception as e: - console.interact(banner="", exitmsg=exit_message) + console.interact(banner="", exitmsg="") else: try: run_multiline_interactive_console(console=console) diff --git a/Lib/bdb.py b/Lib/bdb.py index aa621053cfb4bc..d7543017940d4f 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -369,6 +369,7 @@ def set_trace(self, frame=None): If frame is not specified, debugging starts from caller's frame. """ + sys.settrace(None) if frame is None: frame = sys._getframe().f_back self.reset() diff --git a/Lib/copy.py b/Lib/copy.py index 7a1907d75494d7..a79976d3a658f0 100644 --- a/Lib/copy.py +++ b/Lib/copy.py @@ -4,8 +4,9 @@ import copy - x = copy.copy(y) # make a shallow copy of y - x = copy.deepcopy(y) # make a deep copy of y + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` For module specific errors, copy.Error is raised. @@ -56,7 +57,7 @@ class Error(Exception): pass error = Error # backward compatibility -__all__ = ["Error", "copy", "deepcopy"] +__all__ = ["Error", "copy", "deepcopy", "replace"] def copy(x): """Shallow copy operation on arbitrary Python objects. diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 7243d052cc27f3..8403ef9b44ad1a 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -13,7 +13,6 @@ import _imp import sys -import threading import types @@ -257,6 +256,9 @@ def create_module(self, spec): def exec_module(self, module): """Make the module load lazily.""" + # Threading is only needed for lazy loading, and importlib.util can + # be pulled in at interpreter startup, so defer until needed. + import threading module.__spec__.loader = self.loader module.__loader__ = self.loader # Don't need to worry about deep-copying as trying to set an attribute diff --git a/Lib/multiprocessing/shared_memory.py b/Lib/multiprocessing/shared_memory.py index 67e70fdc27cf31..99a8ce3320ad4e 100644 --- a/Lib/multiprocessing/shared_memory.py +++ b/Lib/multiprocessing/shared_memory.py @@ -539,6 +539,6 @@ def index(self, value): if value == entry: return position else: - raise ValueError(f"{value!r} not in this container") + raise ValueError("ShareableList.index(x): x not in list") __class_getitem__ = classmethod(types.GenericAlias) diff --git a/Lib/os.py b/Lib/os.py index 4b48afb040e565..aaa758d955fe4c 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -373,61 +373,45 @@ def walk(top, topdown=True, onerror=None, followlinks=False): # minor reason when (say) a thousand readable directories are still # left to visit. try: - scandir_it = scandir(top) + with scandir(top) as entries: + for entry in entries: + try: + if followlinks is _walk_symlinks_as_files: + is_dir = entry.is_dir(follow_symlinks=False) and not entry.is_junction() + else: + is_dir = entry.is_dir() + except OSError: + # If is_dir() raises an OSError, consider the entry not to + # be a directory, same behaviour as os.path.isdir(). + is_dir = False + + if is_dir: + dirs.append(entry.name) + else: + nondirs.append(entry.name) + + if not topdown and is_dir: + # Bottom-up: traverse into sub-directory, but exclude + # symlinks to directories if followlinks is False + if followlinks: + walk_into = True + else: + try: + is_symlink = entry.is_symlink() + except OSError: + # If is_symlink() raises an OSError, consider the + # entry not to be a symbolic link, same behaviour + # as os.path.islink(). 
+ is_symlink = False + walk_into = not is_symlink + + if walk_into: + walk_dirs.append(entry.path) except OSError as error: if onerror is not None: onerror(error) continue - cont = False - with scandir_it: - while True: - try: - try: - entry = next(scandir_it) - except StopIteration: - break - except OSError as error: - if onerror is not None: - onerror(error) - cont = True - break - - try: - if followlinks is _walk_symlinks_as_files: - is_dir = entry.is_dir(follow_symlinks=False) and not entry.is_junction() - else: - is_dir = entry.is_dir() - except OSError: - # If is_dir() raises an OSError, consider the entry not to - # be a directory, same behaviour as os.path.isdir(). - is_dir = False - - if is_dir: - dirs.append(entry.name) - else: - nondirs.append(entry.name) - - if not topdown and is_dir: - # Bottom-up: traverse into sub-directory, but exclude - # symlinks to directories if followlinks is False - if followlinks: - walk_into = True - else: - try: - is_symlink = entry.is_symlink() - except OSError: - # If is_symlink() raises an OSError, consider the - # entry not to be a symbolic link, same behaviour - # as os.path.islink(). - is_symlink = False - walk_into = not is_symlink - - if walk_into: - walk_dirs.append(entry.path) - if cont: - continue - if topdown: # Yield before sub-directory traversal if going top down yield top, dirs, nondirs diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index b5f903ec1f03ce..05f55badd77c58 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -781,7 +781,32 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): """ raise UnsupportedOperation(self._unsupported_msg('mkdir()')) - def copy(self, target, follow_symlinks=True): + # Metadata keys supported by this path type. + _readable_metadata = _writable_metadata = frozenset() + + def _read_metadata(self, keys=None, *, follow_symlinks=True): + """ + Returns path metadata as a dict with string keys. + """ + raise UnsupportedOperation(self._unsupported_msg('_read_metadata()')) + + def _write_metadata(self, metadata, *, follow_symlinks=True): + """ + Sets path metadata from the given dict with string keys. + """ + raise UnsupportedOperation(self._unsupported_msg('_write_metadata()')) + + def _copy_metadata(self, target, *, follow_symlinks=True): + """ + Copies metadata (permissions, timestamps, etc) from this path to target. + """ + # Metadata types supported by both source and target. + keys = self._readable_metadata & target._writable_metadata + if keys: + metadata = self._read_metadata(keys, follow_symlinks=follow_symlinks) + target._write_metadata(metadata, follow_symlinks=follow_symlinks) + + def copy(self, target, *, follow_symlinks=True, preserve_metadata=False): """ Copy the contents of this file to the given target. 
If this file is a symlink and follow_symlinks is false, a symlink will be created at the @@ -793,6 +818,8 @@ def copy(self, target, follow_symlinks=True): raise OSError(f"{self!r} and {target!r} are the same file") if not follow_symlinks and self.is_symlink(): target.symlink_to(self.readlink()) + if preserve_metadata: + self._copy_metadata(target, follow_symlinks=False) return with self.open('rb') as source_f: try: @@ -805,6 +832,8 @@ def copy(self, target, follow_symlinks=True): f'Directory does not exist: {target}') from e else: raise + if preserve_metadata: + self._copy_metadata(target) def copytree(self, target, *, follow_symlinks=True, dirs_exist_ok=False, ignore=None, on_error=None): diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index acb57214b81865..eae8a30c876f19 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -17,7 +17,8 @@ except ImportError: grp = None -from ._os import UnsupportedOperation, copyfile +from ._os import (UnsupportedOperation, copyfile, file_metadata_keys, + read_file_metadata, write_file_metadata) from ._abc import PurePathBase, PathBase @@ -781,8 +782,12 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): if not exist_ok or not self.is_dir(): raise + _readable_metadata = _writable_metadata = file_metadata_keys + _read_metadata = read_file_metadata + _write_metadata = write_file_metadata + if copyfile: - def copy(self, target, follow_symlinks=True): + def copy(self, target, *, follow_symlinks=True, preserve_metadata=False): """ Copy the contents of this file to the given target. If this file is a symlink and follow_symlinks is false, a symlink will be created at the @@ -799,7 +804,8 @@ def copy(self, target, follow_symlinks=True): return except UnsupportedOperation: pass # Fall through to generic code. - PathBase.copy(self, target, follow_symlinks=follow_symlinks) + PathBase.copy(self, target, follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata) def chmod(self, mode, *, follow_symlinks=True): """ diff --git a/Lib/pathlib/_os.py b/Lib/pathlib/_os.py index 61923b5e410b5c..164ee8e9034427 100644 --- a/Lib/pathlib/_os.py +++ b/Lib/pathlib/_os.py @@ -2,7 +2,7 @@ Low-level OS functionality wrappers used by pathlib. """ -from errno import EBADF, EOPNOTSUPP, ETXTBSY, EXDEV +from errno import * import os import stat import sys @@ -178,3 +178,100 @@ def copyfileobj(source_f, target_f): write_target = target_f.write while buf := read_source(1024 * 1024): write_target(buf) + + +# Kinds of metadata supported by the operating system. +file_metadata_keys = {'mode', 'times_ns'} +if hasattr(os.stat_result, 'st_flags'): + file_metadata_keys.add('flags') +if hasattr(os, 'listxattr'): + file_metadata_keys.add('xattrs') +file_metadata_keys = frozenset(file_metadata_keys) + + +def read_file_metadata(path, keys=None, *, follow_symlinks=True): + """ + Returns local path metadata as a dict with string keys. 
+ """ + if keys is None: + keys = file_metadata_keys + assert keys.issubset(file_metadata_keys) + result = {} + for key in keys: + if key == 'xattrs': + try: + result['xattrs'] = [ + (attr, os.getxattr(path, attr, follow_symlinks=follow_symlinks)) + for attr in os.listxattr(path, follow_symlinks=follow_symlinks)] + except OSError as err: + if err.errno not in (EPERM, ENOTSUP, ENODATA, EINVAL, EACCES): + raise + continue + st = os.stat(path, follow_symlinks=follow_symlinks) + if key == 'mode': + result['mode'] = stat.S_IMODE(st.st_mode) + elif key == 'times_ns': + result['times_ns'] = st.st_atime_ns, st.st_mtime_ns + elif key == 'flags': + result['flags'] = st.st_flags + return result + + +def write_file_metadata(path, metadata, *, follow_symlinks=True): + """ + Sets local path metadata from the given dict with string keys. + """ + assert frozenset(metadata.keys()).issubset(file_metadata_keys) + + def _nop(*args, ns=None, follow_symlinks=None): + pass + + if follow_symlinks: + # use the real function if it exists + def lookup(name): + return getattr(os, name, _nop) + else: + # use the real function only if it exists + # *and* it supports follow_symlinks + def lookup(name): + fn = getattr(os, name, _nop) + if fn in os.supports_follow_symlinks: + return fn + return _nop + + times_ns = metadata.get('times_ns') + if times_ns is not None: + lookup("utime")(path, ns=times_ns, follow_symlinks=follow_symlinks) + # We must copy extended attributes before the file is (potentially) + # chmod()'ed read-only, otherwise setxattr() will error with -EACCES. + xattrs = metadata.get('xattrs') + if xattrs is not None: + for attr, value in xattrs: + try: + os.setxattr(path, attr, value, follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno not in (EPERM, ENOTSUP, ENODATA, EINVAL, EACCES): + raise + mode = metadata.get('mode') + if mode is not None: + try: + lookup("chmod")(path, mode, follow_symlinks=follow_symlinks) + except NotImplementedError: + # if we got a NotImplementedError, it's because + # * follow_symlinks=False, + # * lchown() is unavailable, and + # * either + # * fchownat() is unavailable or + # * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW. + # (it returned ENOSUP.) + # therefore we're out of options--we simply cannot chown the + # symlink. give up, suppress the error. + # (which is what shutil always did in this circumstance.) + pass + flags = metadata.get('flags') + if flags is not None: + try: + lookup("chflags")(path, flags, follow_symlinks=follow_symlinks) + except OSError as why: + if why.errno not in (EOPNOTSUPP, ENOTSUP): + raise diff --git a/Lib/pdb.py b/Lib/pdb.py index 4af16d0a087c8c..7ff973149b167b 100644 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -306,6 +306,8 @@ class Pdb(bdb.Bdb, cmd.Cmd): _file_mtime_table = {} + _last_pdb_instance = None + def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, nosigint=False, readrc=True): bdb.Bdb.__init__(self, skip=skip) @@ -359,6 +361,12 @@ def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, self._chained_exceptions = tuple() self._chained_exception_index = 0 + def set_trace(self, frame=None): + Pdb._last_pdb_instance = self + if frame is None: + frame = sys._getframe().f_back + super().set_trace(frame) + def sigint_handler(self, signum, frame): if self.allow_kbdint: raise KeyboardInterrupt @@ -2350,7 +2358,10 @@ def set_trace(*, header=None): an assertion fails). If given, *header* is printed to the console just before debugging begins. 
""" - pdb = Pdb() + if Pdb._last_pdb_instance is not None: + pdb = Pdb._last_pdb_instance + else: + pdb = Pdb() if header is not None: pdb.message(header) pdb.set_trace(sys._getframe().f_back) @@ -2481,9 +2492,12 @@ def main(): traceback.print_exception(e, colorize=_colorize.can_colorize()) print("Uncaught exception. Entering post mortem debugging") print("Running 'cont' or 'step' will restart the program") - pdb.interaction(None, e) - print(f"Post mortem debugger finished. The {target} will " - "be restarted") + try: + pdb.interaction(None, e) + except Restart: + print("Restarting", target, "with arguments:") + print("\t" + " ".join(sys.argv[1:])) + continue if pdb._user_requested_quit: break print("The program finished and will be restarted") diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 15586f15dfab30..493932d6c6d441 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -18,6 +18,7 @@ import socket import subprocess import sys +import textwrap import time import unittest @@ -492,29 +493,31 @@ def test_devpoll(self): self.check_elapsed_time(dt) -class FNTLEINTRTest(EINTRBaseTest): +class FCNTLEINTRTest(EINTRBaseTest): def _lock(self, lock_func, lock_name): self.addCleanup(os_helper.unlink, os_helper.TESTFN) - code = '\n'.join(( - "import fcntl, time", - "with open('%s', 'wb') as f:" % os_helper.TESTFN, - " fcntl.%s(f, fcntl.LOCK_EX)" % lock_name, - " time.sleep(%s)" % self.sleep_time)) - start_time = time.monotonic() - proc = self.subprocess(code) + rd1, wr1 = os.pipe() + rd2, wr2 = os.pipe() + for fd in (rd1, wr1, rd2, wr2): + self.addCleanup(os.close, fd) + code = textwrap.dedent(f""" + import fcntl, os, time + with open('{os_helper.TESTFN}', 'wb') as f: + fcntl.{lock_name}(f, fcntl.LOCK_EX) + os.write({wr1}, b"ok") + _ = os.read({rd2}, 2) # wait for parent process + time.sleep({self.sleep_time}) + """) + proc = self.subprocess(code, pass_fds=[wr1, rd2]) with kill_on_error(proc): with open(os_helper.TESTFN, 'wb') as f: # synchronize the subprocess + ok = os.read(rd1, 2) + self.assertEqual(ok, b"ok") + + # notify the child that the parent is ready start_time = time.monotonic() - for _ in support.sleeping_retry(support.LONG_TIMEOUT, error=False): - try: - lock_func(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - lock_func(f, fcntl.LOCK_UN) - except BlockingIOError: - break - else: - dt = time.monotonic() - start_time - raise Exception("failed to sync child in %.1f sec" % dt) + os.write(wr2, b"go") # the child locked the file just a moment ago for 'sleep_time' seconds # that means that the lock below will block for 'sleep_time' minus some diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README index d844385113eb45..7111946b93b280 100644 --- a/Lib/test/crashers/README +++ b/Lib/test/crashers/README @@ -15,7 +15,3 @@ what the variables are. Once the crash is fixed, the test case should be moved into an appropriate test (even if it was originally from the test suite). This ensures the regression doesn't happen again. And if it does, it should be easier to track down. - -Also see Lib/test_crashers.py which exercises the crashers in this directory. -In particular, make sure to add any new infinite loop crashers to the black -list so it doesn't try to run them. 
diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 0197e50125d96e..2a3449016fe951 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -264,6 +264,12 @@ def clear_caches(): for f in typing._cleanups: f() + import inspect + abs_classes = filter(inspect.isabstract, typing.__dict__.values()) + for abc in abs_classes: + for obj in abc.__subclasses__() + [abc]: + obj._abc_caches_clear() + try: fractions = sys.modules['fractions'] except KeyError: diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 18455bb6e0ff52..7f6579319589b4 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -387,7 +387,7 @@ def skip_if_buildbot(reason=None): reason = 'not suitable for buildbots' try: isbuildbot = getpass.getuser().lower() == 'buildbot' - except (KeyError, EnvironmentError) as err: + except (KeyError, OSError) as err: warnings.warn(f'getpass.getuser() failed {err}.', RuntimeWarning) isbuildbot = False return unittest.skipIf(isbuildbot, reason) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 3832b6193ce6c0..ebbde5bc38ad29 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1149,6 +1149,25 @@ def test_none_checks(self) -> None: class CopyTests(unittest.TestCase): """Test copying and pickling AST nodes.""" + @staticmethod + def iter_ast_classes(): + """Iterate over the (native) subclasses of ast.AST recursively. + + This excludes the special class ast.Index since its constructor + returns an integer. + """ + def do(cls): + if cls.__module__ != 'ast': + return + if cls is ast.Index: + return + + yield cls + for sub in cls.__subclasses__(): + yield from do(sub) + + yield from do(ast.AST) + def test_pickling(self): import pickle @@ -1218,6 +1237,259 @@ def test_copy_with_parents(self): )): self.assertEqual(to_tuple(child.parent), to_tuple(node)) + def test_replace_interface(self): + for klass in self.iter_ast_classes(): + with self.subTest(klass=klass): + self.assertTrue(hasattr(klass, '__replace__')) + + fields = set(klass._fields) + with self.subTest(klass=klass, fields=fields): + node = klass(**dict.fromkeys(fields)) + # forbid positional arguments in replace() + self.assertRaises(TypeError, copy.replace, node, 1) + self.assertRaises(TypeError, node.__replace__, 1) + + def test_replace_native(self): + for klass in self.iter_ast_classes(): + fields = set(klass._fields) + attributes = set(klass._attributes) + + with self.subTest(klass=klass, fields=fields, attributes=attributes): + # use of object() to ensure that '==' and 'is' + # behave similarly in ast.compare(node, repl) + old_fields = {field: object() for field in fields} + old_attrs = {attr: object() for attr in attributes} + + # check shallow copy + node = klass(**old_fields) + repl = copy.replace(node) + self.assertTrue(ast.compare(node, repl, compare_attributes=True)) + # check when passing using attributes (they may be optional!) 
+ node = klass(**old_fields, **old_attrs) + repl = copy.replace(node) + self.assertTrue(ast.compare(node, repl, compare_attributes=True)) + + for field in fields: + # check when we sometimes have attributes and sometimes not + for init_attrs in [{}, old_attrs]: + node = klass(**old_fields, **init_attrs) + # only change a single field (do not change attributes) + new_value = object() + repl = copy.replace(node, **{field: new_value}) + for f in fields: + old_value = old_fields[f] + # assert that there is no side-effect + self.assertIs(getattr(node, f), old_value) + # check the changes + if f != field: + self.assertIs(getattr(repl, f), old_value) + else: + self.assertIs(getattr(repl, f), new_value) + self.assertFalse(ast.compare(node, repl, compare_attributes=True)) + + for attribute in attributes: + node = klass(**old_fields, **old_attrs) + # only change a single attribute (do not change fields) + new_attr = object() + repl = copy.replace(node, **{attribute: new_attr}) + for a in attributes: + old_attr = old_attrs[a] + # assert that there is no side-effect + self.assertIs(getattr(node, a), old_attr) + # check the changes + if a != attribute: + self.assertIs(getattr(repl, a), old_attr) + else: + self.assertIs(getattr(repl, a), new_attr) + self.assertFalse(ast.compare(node, repl, compare_attributes=True)) + + def test_replace_accept_known_class_fields(self): + nid, ctx = object(), object() + + node = ast.Name(id=nid, ctx=ctx) + self.assertIs(node.id, nid) + self.assertIs(node.ctx, ctx) + + new_nid = object() + repl = copy.replace(node, id=new_nid) + # assert that there is no side-effect + self.assertIs(node.id, nid) + self.assertIs(node.ctx, ctx) + # check the changes + self.assertIs(repl.id, new_nid) + self.assertIs(repl.ctx, node.ctx) # no changes + + def test_replace_accept_known_class_attributes(self): + node = ast.parse('x').body[0].value + self.assertEqual(node.id, 'x') + self.assertEqual(node.lineno, 1) + + # constructor allows any type so replace() should do the same + lineno = object() + repl = copy.replace(node, lineno=lineno) + # assert that there is no side-effect + self.assertEqual(node.lineno, 1) + # check the changes + self.assertEqual(repl.id, node.id) + self.assertEqual(repl.ctx, node.ctx) + self.assertEqual(repl.lineno, lineno) + + _, _, state = node.__reduce__() + self.assertEqual(state['id'], 'x') + self.assertEqual(state['ctx'], node.ctx) + self.assertEqual(state['lineno'], 1) + + _, _, state = repl.__reduce__() + self.assertEqual(state['id'], 'x') + self.assertEqual(state['ctx'], node.ctx) + self.assertEqual(state['lineno'], lineno) + + def test_replace_accept_known_custom_class_fields(self): + class MyNode(ast.AST): + _fields = ('name', 'data') + __annotations__ = {'name': str, 'data': object} + __match_args__ = ('name', 'data') + + name, data = 'name', object() + + node = MyNode(name, data) + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check shallow copy + repl = copy.replace(node) + # assert that there is no side-effect + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check the shallow copy + self.assertIs(repl.name, name) + self.assertIs(repl.data, data) + + node = MyNode(name, data) + repl_data = object() + # replace custom but known field + repl = copy.replace(node, data=repl_data) + # assert that there is no side-effect + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check the changes + self.assertIs(repl.name, node.name) + self.assertIs(repl.data, repl_data) + + def 
test_replace_accept_known_custom_class_attributes(self): + class MyNode(ast.AST): + x = 0 + y = 1 + _attributes = ('x', 'y') + + node = MyNode() + self.assertEqual(node.x, 0) + self.assertEqual(node.y, 1) + + y = object() + # custom attributes are currently not supported and raise a warning + # because the allowed attributes are hard-coded ! + msg = ( + "MyNode.__init__ got an unexpected keyword argument 'y'. " + "Support for arbitrary keyword arguments is deprecated and " + "will be removed in Python 3.15" + ) + with self.assertWarnsRegex(DeprecationWarning, re.escape(msg)): + repl = copy.replace(node, y=y) + # assert that there is no side-effect + self.assertEqual(node.x, 0) + self.assertEqual(node.y, 1) + # check the changes + self.assertEqual(repl.x, 0) + self.assertEqual(repl.y, y) + + def test_replace_ignore_known_custom_instance_fields(self): + node = ast.parse('x').body[0].value + node.extra = extra = object() # add instance 'extra' field + context = node.ctx + + # assert initial values + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # shallow copy, but drops extra fields + repl = copy.replace(node) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # verify that the 'extra' field is not kept + self.assertIs(repl.id, 'x') + self.assertIs(repl.ctx, context) + self.assertRaises(AttributeError, getattr, repl, 'extra') + + # change known native field + repl = copy.replace(node, id='y') + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # verify that the 'extra' field is not kept + self.assertIs(repl.id, 'y') + self.assertIs(repl.ctx, context) + self.assertRaises(AttributeError, getattr, repl, 'extra') + + def test_replace_reject_missing_field(self): + # case: warn if deleted field is not replaced + node = ast.parse('x').body[0].value + context = node.ctx + del node.id + + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + msg = "Name.__replace__ missing 1 keyword argument: 'id'." + with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node) + # assert that there is no side-effect + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + + # case: do not raise if deleted field is replaced + node = ast.parse('x').body[0].value + context = node.ctx + del node.id + + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + repl = copy.replace(node, id='y') + # assert that there is no side-effect + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + self.assertIs(repl.id, 'y') + self.assertIs(repl.ctx, context) + + def test_replace_reject_known_custom_instance_fields_commits(self): + node = ast.parse('x').body[0].value + node.extra = extra = object() # add instance 'extra' field + context = node.ctx + + # explicit rejection of known instance fields + self.assertTrue(hasattr(node, 'extra')) + msg = "Name.__replace__ got an unexpected keyword argument 'extra'." 
+ with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node, extra=1) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + + def test_replace_reject_unknown_instance_fields(self): + node = ast.parse('x').body[0].value + context = node.ctx + + # explicit rejection of unknown extra fields + self.assertRaises(AttributeError, getattr, node, 'unknown') + msg = "Name.__replace__ got an unexpected keyword argument 'unknown'." + with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node, unknown=1) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertRaises(AttributeError, getattr, node, 'unknown') class ASTHelpers_Test(unittest.TestCase): maxDiff = None diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 9ff0f488dc4fa9..5818e96d61f480 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1768,6 +1768,11 @@ def __getitem__(self, index): sum(([x] for x in range(10)), empty) self.assertEqual(empty, []) + xs = [complex(random.random() - .5, random.random() - .5) + for _ in range(10000)] + self.assertEqual(sum(xs), complex(sum(z.real for z in xs), + sum(z.imag for z in xs))) + @requires_IEEE_754 @unittest.skipIf(HAVE_DOUBLE_ROUNDING, "sum accuracy not guaranteed on machines with double rounding") @@ -1775,6 +1780,10 @@ def __getitem__(self, index): def test_sum_accuracy(self): self.assertEqual(sum([0.1] * 10), 1.0) self.assertEqual(sum([1.0, 10E100, 1.0, -10E100]), 2.0) + self.assertEqual(sum([1.0, 10E100, 1.0, -10E100, 2j]), 2+2j) + self.assertEqual(sum([2+1j, 10E100j, 1j, -10E100j]), 2+2j) + self.assertEqual(sum([1j, 1, 10E100j, 1j, 1.0, -10E100j]), 2+2j) + self.assertEqual(sum([0.1j]*10 + [fractions.Fraction(1, 10)]), 0.1+1j) def test_type(self): self.assertEqual(type(''), type('123')) diff --git a/Lib/test/test_concurrent_futures/test_init.py b/Lib/test/test_concurrent_futures/test_init.py index a36f592b79b7cf..df640929309318 100644 --- a/Lib/test/test_concurrent_futures/test_init.py +++ b/Lib/test/test_concurrent_futures/test_init.py @@ -139,6 +139,7 @@ def _test(self, test_class): def test_spawn(self): self._test(ProcessPoolSpawnFailingInitializerTest) + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_forkserver(self): self._test(ProcessPoolForkserverFailingInitializerTest) diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index 89102373759ca0..3dec64cc9a2414 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -972,6 +972,10 @@ class C: copy.replace(c, x=1, error=2) +class MiscTestCase(unittest.TestCase): + def test__all__(self): + support.check__all__(self, copy, not_exported={"dispatch_table", "error"}) + def global_foo(x, y): return x+y diff --git a/Lib/test/test_free_threading/test_type.py b/Lib/test/test_free_threading/test_type.py index 75259795e81bcb..649676db9c08a5 100644 --- a/Lib/test/test_free_threading/test_type.py +++ b/Lib/test/test_free_threading/test_type.py @@ -106,7 +106,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(10000): + for _ in range(5000): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 30e39e7720e6d1..00def509a219c3 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -815,7 +815,6 @@ def test_annotated_op(self): """ 
self.run_cases_test(input, output) - def test_deopt_and_exit(self): input = """ pure op(OP, (arg1 -- out)) { @@ -827,6 +826,49 @@ def test_deopt_and_exit(self): with self.assertRaises(Exception): self.run_cases_test(input, output) + def test_array_of_one(self): + input = """ + inst(OP, (arg[1] -- out[1])) { + out[0] = arg[0]; + } + """ + output = """ + TARGET(OP) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP); + _PyStackRef *arg; + _PyStackRef *out; + arg = &stack_pointer[-1]; + out = &stack_pointer[-1]; + out[0] = arg[0]; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + def test_pointer_to_stackref(self): + input = """ + inst(OP, (arg: _PyStackRef * -- out)) { + out = *arg; + } + """ + output = """ + TARGET(OP) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP); + _PyStackRef *arg; + _PyStackRef out; + arg = (_PyStackRef *)stack_pointer[-1].bits; + out = *arg; + stack_pointer[-1] = out; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: super().setUp() diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index c10f689c4ea34b..e29097baaf53ae 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -3034,13 +3034,6 @@ def test_basic_multiple_interpreters_deleted_no_reset(self): def test_basic_multiple_interpreters_reset_each(self): # resetting between each interpreter - if Py_TRACE_REFS: - # It's a Py_TRACE_REFS build. - # This test breaks interpreter isolation a little, - # which causes problems on Py_TRACE_REF builds. - # See gh-121110. - raise unittest.SkipTest('crashes on Py_TRACE_REFS builds') - # At this point: # * alive in 0 interpreters # * module def may or may not be loaded already diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index 072e198795d394..cbbdada3b010a7 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -286,25 +286,24 @@ def test_project3_succeeds(self): class ZipWithMissingDirectory(NamespacePackageTest): paths = ['missing_directory.zip'] + # missing_directory.zip contains: + # Length Date Time Name + # --------- ---------- ----- ---- + # 29 2012-05-03 18:13 foo/one.py + # 0 2012-05-03 20:57 bar/ + # 38 2012-05-03 20:57 bar/two.py + # --------- ------- + # 67 3 files - @unittest.expectedFailure def test_missing_directory(self): - # This will fail because missing_directory.zip contains: - # Length Date Time Name - # --------- ---------- ----- ---- - # 29 2012-05-03 18:13 foo/one.py - # 0 2012-05-03 20:57 bar/ - # 38 2012-05-03 20:57 bar/two.py - # --------- ------- - # 67 3 files - - # Because there is no 'foo/', the zipimporter currently doesn't - # know that foo is a namespace package - import foo.one + self.assertEqual(foo.one.attr, 'portion1 foo one') + + def test_missing_directory2(self): + import foo + self.assertFalse(hasattr(foo, 'one')) def test_present_directory(self): - # This succeeds because there is a "bar/" in the zip file import bar.two self.assertEqual(bar.two.attr, 'missing_directory foo two') diff --git a/Lib/test/test_interpreters/test_channels.py b/Lib/test/test_interpreters/test_channels.py index 68cc45d1a5e09f..6c37754142e361 100644 --- a/Lib/test/test_interpreters/test_channels.py +++ b/Lib/test/test_interpreters/test_channels.py @@ -48,6 +48,7 @@ def test_list_all(self): self.assertEqual(after, 
created) def test_shareable(self): + interp = interpreters.create() rch, sch = channels.create() self.assertTrue( @@ -60,8 +61,25 @@ def test_shareable(self): rch2 = rch.recv() sch2 = rch.recv() + interp.prepare_main(rch=rch, sch=sch) + sch.send_nowait(rch) + sch.send_nowait(sch) + interp.exec(dedent(""" + rch2 = rch.recv() + sch2 = rch.recv() + assert rch2 == rch + assert sch2 == sch + + sch.send_nowait(rch2) + sch.send_nowait(sch2) + """)) + rch3 = rch.recv() + sch3 = rch.recv() + self.assertEqual(rch2, rch) self.assertEqual(sch2, sch) + self.assertEqual(rch3, rch) + self.assertEqual(sch3, sch) def test_is_closed(self): rch, sch = channels.create() diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index 4d2d54705fc894..ad7accf2099f43 100644 --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -299,6 +299,15 @@ def __eq__(self, other): lst = [X(), X()] X() in lst + def test_tier2_invalidates_iterator(self): + # GH-121012 + for _ in range(100): + a = [1, 2, 3] + it = iter(a) + for _ in it: + pass + a.append(4) + self.assertEqual(list(it), []) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_multiprocessing_fork/__init__.py b/Lib/test/test_multiprocessing_fork/__init__.py index aa1fff50b28f5f..b35e82879d7fe2 100644 --- a/Lib/test/test_multiprocessing_fork/__init__.py +++ b/Lib/test/test_multiprocessing_fork/__init__.py @@ -12,5 +12,8 @@ if sys.platform == 'darwin': raise unittest.SkipTest("test may crash on macOS (bpo-33725)") +if support.check_sanitizer(thread=True): + raise unittest.SkipTest("TSAN doesn't support threads after fork") + def load_tests(*args): return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index da6d82465d29cf..1328a8695b0cca 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -653,6 +653,55 @@ def test_open_unbuffered(self): self.assertIsInstance(f, io.RawIOBase) self.assertEqual(f.read().strip(), b"this is file A") + def test_copy_file_preserve_metadata(self): + base = self.cls(self.base) + source = base / 'fileA' + if hasattr(os, 'chmod'): + os.chmod(source, stat.S_IRWXU | stat.S_IRWXO) + if hasattr(os, 'chflags') and hasattr(stat, 'UF_NODUMP'): + os.chflags(source, stat.UF_NODUMP) + source_st = source.stat() + target = base / 'copyA' + source.copy(target, preserve_metadata=True) + self.assertTrue(target.exists()) + self.assertEqual(source.read_text(), target.read_text()) + target_st = target.stat() + self.assertLessEqual(source_st.st_atime, target_st.st_atime) + self.assertLessEqual(source_st.st_mtime, target_st.st_mtime) + self.assertEqual(source_st.st_mode, target_st.st_mode) + if hasattr(source_st, 'st_flags'): + self.assertEqual(source_st.st_flags, target_st.st_flags) + + @os_helper.skip_unless_xattr + def test_copy_file_preserve_metadata_xattrs(self): + base = self.cls(self.base) + source = base / 'fileA' + os.setxattr(source, b'user.foo', b'42') + target = base / 'copyA' + source.copy(target, preserve_metadata=True) + self.assertEqual(os.getxattr(target, b'user.foo'), b'42') + + @needs_symlinks + def test_copy_link_preserve_metadata(self): + base = self.cls(self.base) + source = base / 'linkA' + if hasattr(os, 'lchmod'): + os.lchmod(source, stat.S_IRWXU | stat.S_IRWXO) + if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'): + os.lchflags(source, stat.UF_NODUMP) + source_st = source.lstat() + target = base / 'copyA' + source.copy(target, follow_symlinks=False, 
preserve_metadata=True) + self.assertTrue(target.exists()) + self.assertTrue(target.is_symlink()) + self.assertEqual(source.readlink(), target.readlink()) + target_st = target.lstat() + self.assertLessEqual(source_st.st_atime, target_st.st_atime) + self.assertLessEqual(source_st.st_mtime, target_st.st_mtime) + self.assertEqual(source_st.st_mode, target_st.st_mode) + if hasattr(source_st, 'st_flags'): + self.assertEqual(source_st.st_flags, target_st.st_flags) + @unittest.skipIf(sys.platform == "win32" or sys.platform == "wasi", "directories are always readable on Windows and WASI") @unittest.skipIf(root_in_posix, "test fails with root privilege") def test_copytree_no_read_permission(self): diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 71240157e324a1..343e15a4edc14c 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -2448,6 +2448,49 @@ def test_pdb_show_attribute_and_item(): (Pdb) c """ +# doctest will modify pdb.set_trace during the test, so we need to backup +# the original function to use it in the test +original_pdb_settrace = pdb.set_trace + +def test_pdb_with_inline_breakpoint(): + """Hard-coded breakpoint() calls should invoke the same debugger instance + + >>> def test_function(): + ... x = 1 + ... import pdb; pdb.Pdb().set_trace() + ... original_pdb_settrace() + ... x = 2 + + >>> with PdbTestInput(['display x', + ... 'n', + ... 'n', + ... 'n', + ... 'n', + ... 'undisplay', + ... 'c']): + ... test_function() + > (3)test_function() + -> import pdb; pdb.Pdb().set_trace() + (Pdb) display x + display x: 1 + (Pdb) n + > (4)test_function() + -> original_pdb_settrace() + (Pdb) n + > (4)test_function() + -> original_pdb_settrace() + (Pdb) n + > (5)test_function() + -> x = 2 + (Pdb) n + --Return-- + > (5)test_function()->None + -> x = 2 + display x: 2 [old: 1] + (Pdb) undisplay + (Pdb) c + """ + def test_pdb_issue_20766(): """Test for reference leaks when the SIGINT handler is set. 
@@ -3545,6 +3588,23 @@ def change_file(content, filename): # the file as up to date self.assertNotIn("WARNING:", stdout) + def test_post_mortem_restart(self): + script = """ + def foo(): + raise ValueError("foo") + foo() + """ + + commands = """ + continue + restart + continue + quit + """ + + stdout, stderr = self.run_pdb_script(script, commands) + self.assertIn("Restarting", stdout) + def test_relative_imports(self): self.module_name = 't_main' os_helper.rmtree(self.module_name) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index 93c80467a04546..015b690566223d 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -11,6 +11,7 @@ from unittest.mock import patch from test.support import force_not_colorized from test.support import SHORT_TIMEOUT +from test.support.import_helper import import_module from test.support.os_helper import unlink from .support import ( @@ -902,6 +903,9 @@ def test_python_basic_repl(self): self.assertNotIn("Traceback", output) def test_not_wiping_history_file(self): + # skip, if readline module is not available + import_module('readline') + hfile = tempfile.NamedTemporaryFile(delete=False) self.addCleanup(unlink, hfile.name) env = os.environ.copy() @@ -927,8 +931,11 @@ def test_not_wiping_history_file(self): def run_repl(self, repl_input: str | list[str], env: dict | None = None) -> tuple[str, int]: master_fd, slave_fd = pty.openpty() + cmd = [sys.executable, "-i", "-u"] + if env is None: + cmd.append("-I") process = subprocess.Popen( - [sys.executable, "-i", "-u"], + cmd, stdin=slave_fd, stdout=slave_fd, stderr=slave_fd, diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 8b69cd03ba7f24..9412a2d737bb2e 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1407,7 +1407,7 @@ def open_fds(): t = threading.Thread(target=open_fds) t.start() try: - with self.assertRaises(EnvironmentError): + with self.assertRaises(OSError): subprocess.Popen(NONEXISTING_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index 0bae54d26c64f1..1861616d5ec3bf 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -52,8 +52,11 @@ def module_path_to_dotted_name(path): TESTMOD = "ziptestmodule" +TESTMOD2 = "ziptestmodule2" +TESTMOD3 = "ziptestmodule3" TESTPACK = "ziptestpackage" TESTPACK2 = "ziptestpackage2" +TESTPACK3 = "ziptestpackage3" TEMP_DIR = os.path.abspath("junk95142") TEMP_ZIP = os.path.abspath("junk95142.zip") TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "zipimport_data") @@ -95,8 +98,10 @@ def makeTree(self, files, dirName=TEMP_DIR): # defined by files under the directory dirName. self.addCleanup(os_helper.rmtree, dirName) - for name, (mtime, data) in files.items(): - path = os.path.join(dirName, name) + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + path = os.path.join(dirName, *name.split('/')) if path[-1] == os.sep: if not os.path.isdir(path): os.makedirs(path) @@ -107,22 +112,18 @@ def makeTree(self, files, dirName=TEMP_DIR): with open(path, 'wb') as fp: fp.write(data) - def makeZip(self, files, zipName=TEMP_ZIP, **kw): + def makeZip(self, files, zipName=TEMP_ZIP, *, + comment=None, file_comment=None, stuff=None, prefix='', **kw): # Create a zip archive based set of modules/packages - # defined by files in the zip file zipName. If the - # key 'stuff' exists in kw it is prepended to the archive. 
+ # defined by files in the zip file zipName. + # If stuff is not None, it is prepended to the archive. self.addCleanup(os_helper.unlink, zipName) - with ZipFile(zipName, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - z.writestr(zinfo, data) - comment = kw.get("comment", None) + with ZipFile(zipName, "w", compression=self.compression) as z: + self.writeZip(z, files, file_comment=file_comment, prefix=prefix) if comment is not None: z.comment = comment - stuff = kw.get("stuff", None) if stuff is not None: # Prepend 'stuff' to the start of the zipfile with open(zipName, "rb") as f: @@ -131,26 +132,47 @@ def makeZip(self, files, zipName=TEMP_ZIP, **kw): f.write(stuff) f.write(data) + def writeZip(self, z, files, *, file_comment=None, prefix=''): + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + else: + mtime = NOW + name = name.replace(os.sep, '/') + zinfo = ZipInfo(prefix + name, time.localtime(mtime)) + zinfo.compress_type = self.compression + if file_comment is not None: + zinfo.comment = file_comment + if data is None: + zinfo.CRC = 0 + z.mkdir(zinfo) + else: + assert name[-1] != '/' + z.writestr(zinfo, data) + def getZip64Files(self): # This is the simplest way to make zipfile generate the zip64 EOCD block - return {f"f{n}.py": (NOW, test_src) for n in range(65537)} + return {f"f{n}.py": test_src for n in range(65537)} def doTest(self, expected_ext, files, *modules, **kw): + if 'prefix' not in kw: + kw['prefix'] = 'pre/fix/' self.makeZip(files, **kw) self.doTestWithPreBuiltZip(expected_ext, *modules, **kw) - def doTestWithPreBuiltZip(self, expected_ext, *modules, **kw): - sys.path.insert(0, TEMP_ZIP) + def doTestWithPreBuiltZip(self, expected_ext, *modules, + call=None, prefix='', **kw): + zip_path = os.path.join(TEMP_ZIP, *prefix.split('/')[:-1]) + sys.path.insert(0, zip_path) mod = importlib.import_module(".".join(modules)) - call = kw.get('call') if call is not None: call(mod) if expected_ext: file = mod.get_file() - self.assertEqual(file, os.path.join(TEMP_ZIP, + self.assertEqual(file, os.path.join(zip_path, *modules) + expected_ext) def testAFakeZlib(self): @@ -176,7 +198,7 @@ def testAFakeZlib(self): self.skipTest('zlib is a builtin module') if "zlib" in sys.modules: del sys.modules["zlib"] - files = {"zlib.py": (NOW, test_src)} + files = {"zlib.py": test_src} try: self.doTest(".py", files, "zlib") except ImportError: @@ -187,16 +209,16 @@ def testAFakeZlib(self): self.fail("expected test to raise ImportError") def testPy(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD) def testPyc(self): - files = {TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testBoth(self): - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testUncheckedHashBasedPyc(self): @@ -229,22 +251,22 @@ def check(mod): self.doTest(None, files, TESTMOD, call=check) def testEmptyPy(self): - files = {TESTMOD + ".py": (NOW, "")} + files = {TESTMOD + ".py": ""} self.doTest(None, files, TESTMOD) def testBadMagic(self): # make pyc magic word invalid, forcing loading from .py badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + 
pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badmagic_pyc} self.doTest(".py", files, TESTMOD) def testBadMagic2(self): # make pyc magic word invalid, causing an ImportError badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + pyc_ext: badmagic_pyc} try: self.doTest(".py", files, TESTMOD) self.fail("This should not be reached") @@ -257,22 +279,22 @@ def testBadMTime(self): # flip the second bit -- not the first as that one isn't stored in the # .py's mtime in the zip archive. badtime_pyc[11] ^= 0x02 - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, badtime_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badtime_pyc} self.doTest(".py", files, TESTMOD) def test2038MTime(self): # Make sure we can handle mtimes larger than what a 32-bit signed number # can hold. twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: twenty_thirty_eight_pyc} self.doTest(".py", files, TESTMOD) def testPackage(self): packdir = TESTPACK + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTMOD) def testSubPackage(self): @@ -280,9 +302,9 @@ def testSubPackage(self): # archives. packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) def testSubNamespacePackage(self): @@ -291,29 +313,104 @@ def testSubNamespacePackage(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep # The first two files are just directory entries (so have no data). - files = {packdir: (NOW, ""), - packdir2: (NOW, ""), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir: None, + packdir2: None, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) + def testPackageExplicitDirectories(self): + # Test explicit namespace packages with explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.writestr('a/__init__.py', test_src) + z.mkdir('a/b') + z.writestr('a/b/__init__.py', test_src) + z.mkdir('a/b/c') + z.writestr('a/b/c/__init__.py', test_src) + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile='__init__.py') + + def testPackageImplicitDirectories(self): + # Test explicit namespace packages without explicit directory entries. 
+ self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.writestr('a/__init__.py', test_src) + z.writestr('a/b/__init__.py', test_src) + z.writestr('a/b/c/__init__.py', test_src) + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile='__init__.py') + + def testNamespacePackageExplicitDirectories(self): + # Test implicit namespace packages with explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.mkdir('a/b') + z.mkdir('a/b/c') + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile=None) + + def testNamespacePackageImplicitDirectories(self): + # Test implicit namespace packages without explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile=None) + + def _testPackage(self, initfile): + zi = zipimport.zipimporter(os.path.join(TEMP_ZIP, 'a')) + if initfile is None: + # XXX Should it work? + self.assertRaises(zipimport.ZipImportError, zi.is_package, 'b') + self.assertRaises(zipimport.ZipImportError, zi.get_source, 'b') + self.assertRaises(zipimport.ZipImportError, zi.get_code, 'b') + else: + self.assertTrue(zi.is_package('b')) + self.assertEqual(zi.get_source('b'), test_src) + self.assertEqual(zi.get_code('b').co_filename, + os.path.join(TEMP_ZIP, 'a', 'b', initfile)) + + sys.path.insert(0, TEMP_ZIP) + self.assertNotIn('a', sys.modules) + + mod = importlib.import_module(f'a.b') + self.assertIn('a', sys.modules) + self.assertIs(sys.modules['a.b'], mod) + if initfile is None: + self.assertIsNone(mod.__file__) + else: + self.assertEqual(mod.__file__, + os.path.join(TEMP_ZIP, 'a', 'b', initfile)) + self.assertEqual(len(mod.__path__), 1, mod.__path__) + self.assertEqual(mod.__path__[0], os.path.join(TEMP_ZIP, 'a', 'b')) + + mod2 = importlib.import_module(f'a.b.c.d') + self.assertIn('a.b.c', sys.modules) + self.assertIn('a.b.c.d', sys.modules) + self.assertIs(sys.modules['a.b.c.d'], mod2) + self.assertIs(mod.c.d, mod2) + self.assertEqual(mod2.__file__, + os.path.join(TEMP_ZIP, 'a', 'b', 'c', 'd.py')) + def testMixedNamespacePackage(self): # Test implicit namespace packages spread between a # real filesystem and a zip archive. 
packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) @@ -346,8 +443,8 @@ def testMixedNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. + mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-3]) @@ -364,13 +461,13 @@ def testMixedNamespacePackage(self): self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. + mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testNamespacePackage(self): @@ -378,22 +475,22 @@ def testNamespacePackage(self): # archives. 
packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip2 = os.path.abspath("path2.zip") self.makeZip(files2, zip2) @@ -422,8 +519,8 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. + mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual("path2.zip", mod.__file__.split(os.sep)[-3]) # One level deeper... @@ -438,29 +535,22 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((subpkg, TESTMOD))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. 
+ mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testZipImporterMethods(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi.archive, TEMP_ZIP) @@ -516,35 +606,26 @@ def testZipImporterMethods(self): def testInvalidateCaches(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + extra_files = [packdir, packdir2] + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Check that the file information remains accurate after reloading zi.invalidate_caches() - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Add a new file to the ZIP archive - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Check that we can detect the new file after invalidating the cache zi.invalidate_caches() - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) spec = zi.find_spec('spam2') self.assertIsNotNone(spec) self.assertIsInstance(spec.loader, zipimport.zipimporter) @@ -558,36 +639,27 @@ def testInvalidateCaches(self): def testInvalidateCachesWithMultipleZipimports(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) 
in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + extra_files = [packdir, packdir2] + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Zipimporter for the same path. zi2 = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi2._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi2._get_files()), sorted([*files, *extra_files])) # Add a new file to the ZIP archive to make the cache wrong. - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Invalidate the cache of the first zipimporter. zi.invalidate_caches() # Check that the second zipimporter detects the new file and isn't using a stale cache. - self.assertEqual(zi2._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi2._get_files()), sorted([*files, *extra_files])) spec = zi2.find_spec('spam2') self.assertIsNotNone(spec) self.assertIsInstance(spec.loader, zipimport.zipimporter) @@ -595,16 +667,9 @@ def testInvalidateCachesWithMultipleZipimports(self): def testZipImporterMethodsInSubDirectory(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"eggs" - z.writestr(zinfo, data) + files = {packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"eggs") zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir) self.assertEqual(zi.archive, TEMP_ZIP) @@ -650,17 +715,33 @@ def testZipImporterMethodsInSubDirectory(self): self.assertIsNone(loader.get_source(mod_name)) self.assertEqual(loader.get_filename(mod_name), mod.__file__) - def testGetData(self): + def testGetDataExplicitDirectories(self): self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - z.compression = self.compression - name = "testdata.dat" - data = bytes(x for x in range(256)) - z.writestr(name, data) - - zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(data, zi.get_data(name)) - self.assertIn('zipimporter object', repr(zi)) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.mkdir('a/b') + z.mkdir('a/b/c') + data = bytes(range(256)) + z.writestr('a/b/c/testdata.dat', data) + self._testGetData() + + def testGetDataImplicitDirectories(self): + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + data = bytes(range(256)) + z.writestr('a/b/c/testdata.dat', data) + self._testGetData() + + 
def _testGetData(self): + zi = zipimport.zipimporter(os.path.join(TEMP_ZIP, 'ignored')) + pathname = os.path.join('a', 'b', 'c', 'testdata.dat') + data = bytes(range(256)) + self.assertEqual(zi.get_data(pathname), data) + self.assertEqual(zi.get_data(os.path.join(TEMP_ZIP, pathname)), data) + self.assertEqual(zi.get_data(os.path.join('a', 'b', '')), b'') + self.assertEqual(zi.get_data(os.path.join(TEMP_ZIP, 'a', 'b', '')), b'') + self.assertRaises(OSError, zi.get_data, os.path.join('a', 'b')) + self.assertRaises(OSError, zi.get_data, os.path.join(TEMP_ZIP, 'a', 'b')) def testImporterAttr(self): src = """if 1: # indent hack @@ -669,9 +750,9 @@ def get_file(): if __loader__.get_data("some.data") != b"some data": raise AssertionError("bad data")\n""" pyc = make_pyc(compile(src, "", "exec"), NOW, len(src)) - files = {TESTMOD + pyc_ext: (NOW, pyc), - "some.data": (NOW, "some data")} - self.doTest(pyc_ext, files, TESTMOD) + files = {TESTMOD + pyc_ext: pyc, + "some.data": "some data"} + self.doTest(pyc_ext, files, TESTMOD, prefix='') def testDefaultOptimizationLevel(self): # zipimport should use the default optimization level (#28131) @@ -679,7 +760,7 @@ def testDefaultOptimizationLevel(self): def test(val): assert(val) return val\n""" - files = {TESTMOD + '.py': (NOW, src)} + files = {TESTMOD + '.py': src} self.makeZip(files) sys.path.insert(0, TEMP_ZIP) mod = importlib.import_module(TESTMOD) @@ -692,7 +773,7 @@ def test(val): def testImport_WithStuff(self): # try importing from a zipfile which contains additional # stuff at the beginning of the file - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"Some Stuff"*31) @@ -700,18 +781,18 @@ def assertModuleSource(self, module): self.assertEqual(inspect.getsource(module), test_src) def testGetSource(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, call=self.assertModuleSource) def testGetCompiledSource(self): pyc = make_pyc(compile(test_src, "", "exec"), NOW, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: pyc} self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource) def runDoctest(self, callback): - files = {TESTMOD + ".py": (NOW, test_src), - "xyz.txt": (NOW, ">>> log.append(True)\n")} + files = {TESTMOD + ".py": test_src, + "xyz.txt": ">>> log.append(True)\n"} self.doTest(".py", files, TESTMOD, call=callback) def doDoctestFile(self, module): @@ -763,29 +844,21 @@ def doTraceback(self, module): raise AssertionError("This ought to be impossible") def testTraceback(self): - files = {TESTMOD + ".py": (NOW, raise_src)} + files = {TESTMOD + ".py": raise_src} self.doTest(None, files, TESTMOD, call=self.doTraceback) @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, "need an unencodable filename") def testUnencodable(self): filename = os_helper.TESTFN_UNENCODABLE + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, "w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) spec = zipimport.zipimporter(filename).find_spec(TESTMOD) mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) def testBytesPath(self): filename = os_helper.TESTFN + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, 
"w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) zipimport.zipimporter(filename) with self.assertRaises(TypeError): @@ -796,15 +869,15 @@ def testBytesPath(self): zipimport.zipimporter(memoryview(os.fsencode(filename))) def testComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"comment") def testBeginningCruftAndComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"cruft" * 64, comment=b"hi") def testLargestPossibleComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) @support.requires_resource('cpu') diff --git a/Lib/zipimport.py b/Lib/zipimport.py index a49a21f0799df2..68f031f89c9996 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -155,6 +155,8 @@ def get_data(self, pathname): toc_entry = self._get_files()[key] except KeyError: raise OSError(0, '', key) + if toc_entry is None: + return b'' return _get_data(self.archive, toc_entry) @@ -554,6 +556,22 @@ def _read_directory(archive): finally: fp.seek(start_offset) _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive) + + # Add implicit directories. + count = 0 + for name in list(files): + while True: + i = name.rstrip(path_sep).rfind(path_sep) + if i < 0: + break + name = name[:i + 1] + if name in files: + break + files[name] = None + count += 1 + if count: + _bootstrap._verbose_message('zipimport: added {} implicit directories in {!r}', + count, archive) return files # During bootstrap, we may need to load the encodings diff --git a/Makefile.pre.in b/Makefile.pre.in index e1c793ce629b02..0bece8717ef4c0 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1115,6 +1115,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/longobject.h \ $(srcdir)/Include/cpython/memoryobject.h \ $(srcdir)/Include/cpython/methodobject.h \ + $(srcdir)/Include/cpython/modsupport.h \ $(srcdir)/Include/cpython/monitoring.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ @@ -2651,7 +2652,7 @@ inclinstall: $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal; \ else true; \ fi - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ if test ! 
-d $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; then \ echo "Creating directory $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc"; \ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; \ @@ -2672,7 +2673,7 @@ inclinstall: echo $(INSTALL_DATA) $$i $(INCLUDEPY)/internal; \ $(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY)/internal; \ done - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ echo $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ for i in $(srcdir)/Include/internal/mimalloc/mimalloc/*.h; \ diff --git a/Misc/HISTORY b/Misc/HISTORY index 8ca35e1af62c05..a74d7e06acd071 100644 --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -3952,7 +3952,7 @@ Library - Issue #18626: the inspect module now offers a basic command line introspection interface (Initial patch by Claudiu Popa) -- Issue #3015: Fixed tkinter with wantobject=False. Any Tcl command call +- Issue #3015: Fixed tkinter with ``wantobjects=False``. Any Tcl command call returned empty string. - Issue #19037: The mailbox module now makes all changes to maildir files diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst index 9a729a45b160eb..f30ed548e7e033 100644 --- a/Misc/NEWS.d/3.10.0a1.rst +++ b/Misc/NEWS.d/3.10.0a1.rst @@ -97,7 +97,7 @@ convention. Patch by Donghee Na. .. nonce: aJS9B3 .. section: Core and Builtins -Port the :mod:`_bisect` module to the multi-phase initialization API +Port the :mod:`!_bisect` module to the multi-phase initialization API (:pep:`489`). .. @@ -128,7 +128,7 @@ Taskaya. .. nonce: lh335O .. section: Core and Builtins -Port the :mod:`_lsprof` extension module to multi-phase initialization +Port the :mod:`!_lsprof` extension module to multi-phase initialization (:pep:`489`). .. @@ -148,7 +148,7 @@ Port the :mod:`cmath` extension module to multi-phase initialization .. nonce: jiXmyT .. section: Core and Builtins -Port the :mod:`_scproxy` extension module to multi-phase initialization +Port the :mod:`!_scproxy` extension module to multi-phase initialization (:pep:`489`). .. @@ -168,7 +168,7 @@ Port the :mod:`termios` extension module to multi-phase initialization .. nonce: QuDIut .. section: Core and Builtins -Convert the :mod:`_sha256` extension module types to heap types. +Convert the :mod:`!_sha256` extension module types to heap types. .. @@ -187,7 +187,7 @@ classes with a huge amount of arguments. Patch by Pablo Galindo. .. nonce: CnRME3 .. section: Core and Builtins -Port the :mod:`_overlapped` extension module to multi-phase initialization +Port the :mod:`!_overlapped` extension module to multi-phase initialization (:pep:`489`). .. @@ -197,7 +197,7 @@ Port the :mod:`_overlapped` extension module to multi-phase initialization .. nonce: X9CZgo .. section: Core and Builtins -Port the :mod:`_curses_panel` extension module to multi-phase initialization +Port the :mod:`!_curses_panel` extension module to multi-phase initialization (:pep:`489`). .. @@ -207,7 +207,7 @@ Port the :mod:`_curses_panel` extension module to multi-phase initialization .. nonce: 5jZymK .. section: Core and Builtins -Port the :mod:`_opcode` extension module to multi-phase initialization +Port the :mod:`!_opcode` extension module to multi-phase initialization (:pep:`489`). .. @@ -282,7 +282,7 @@ initialized ``_ast`` module. .. nonce: vcxSUa .. 
section: Core and Builtins -Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_operator` to use :c:func:`PyType_FromSpec`. .. @@ -291,7 +291,7 @@ Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. .. nonce: fubBkb .. section: Core and Builtins -Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. +Port :mod:`!_sha3` to multi-phase init. Convert static types to heap types. .. @@ -300,7 +300,7 @@ Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. .. nonce: FC13e7 .. section: Core and Builtins -Port the :mod:`_blake2` extension module to the multi-phase initialization +Port the :mod:`!_blake2` extension module to the multi-phase initialization API (:pep:`489`). .. @@ -339,7 +339,7 @@ The output of ``python --help`` contains now only ASCII characters. .. nonce: O0d3ym .. section: Core and Builtins -Port the :mod:`_sha1`, :mod:`_sha512`, and :mod:`_md5` extension modules to +Port the :mod:`!_sha1`, :mod:`!_sha512`, and :mod:`!_md5` extension modules to multi-phase initialization API (:pep:`489`). .. @@ -636,7 +636,7 @@ Remove the remaining files from the old parser and the :mod:`symbol` module. .. nonce: _yI-ax .. section: Core and Builtins -Convert :mod:`_bz2` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_bz2` to use :c:func:`PyType_FromSpec`. .. @@ -666,7 +666,7 @@ by Brandt Bucher. .. nonce: 61iyYh .. section: Core and Builtins -Port :mod:`_gdbm` to multiphase initialization. +Port :mod:`!_gdbm` to multiphase initialization. .. @@ -696,7 +696,7 @@ for emitting syntax errors. Patch by Pablo Galindo. .. nonce: mmlp3Q .. section: Core and Builtins -Port :mod:`_dbm` to multiphase initialization. +Port :mod:`!_dbm` to multiphase initialization. .. @@ -1010,7 +1010,7 @@ Port :mod:`mmap` to multiphase initialization. .. nonce: Kfe9fT .. section: Core and Builtins -Port :mod:`_lzma` to multiphase initialization. +Port :mod:`!_lzma` to multiphase initialization. .. diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 79f570439b52b8..bdf9488c81bae1 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -362,7 +362,7 @@ plistlib: fix parsing XML plists with hexadecimal integer values .. nonce: 85BsRA .. section: Library -Fix an incorrectly formatted error from :meth:`_codecs.charmap_decode` when +Fix an incorrectly formatted error from :meth:`!_codecs.charmap_decode` when called with a mapped value outside the range of valid Unicode code points. PR by Max Bernstein. diff --git a/Misc/NEWS.d/3.10.0a3.rst b/Misc/NEWS.d/3.10.0a3.rst index 179cf3e9cfb08c..2aef87ab929aab 100644 --- a/Misc/NEWS.d/3.10.0a3.rst +++ b/Misc/NEWS.d/3.10.0a3.rst @@ -1386,7 +1386,7 @@ Python already implicitly installs signal handlers: see The ``Py_TRASHCAN_BEGIN`` macro no longer accesses PyTypeObject attributes, but now can get the condition by calling the new private -:c:func:`_PyTrash_cond()` function which hides implementation details. +:c:func:`!_PyTrash_cond()` function which hides implementation details. .. diff --git a/Misc/NEWS.d/3.10.0a4.rst b/Misc/NEWS.d/3.10.0a4.rst index ae667f2bffe192..5cea16c259d5ee 100644 --- a/Misc/NEWS.d/3.10.0a4.rst +++ b/Misc/NEWS.d/3.10.0a4.rst @@ -193,7 +193,7 @@ subinterpreters. Patch by Victor Stinner. .. nonce: j7nl6A .. section: Core and Builtins -Make :c:func:`_PyUnicode_FromId` function compatible with subinterpreters. +Make :c:func:`!_PyUnicode_FromId` function compatible with subinterpreters. 
Each interpreter now has an array of identifier objects (interned strings decoded from UTF-8). Patch by Victor Stinner. @@ -367,7 +367,7 @@ uses "options" instead. .. nonce: Quy3zn .. section: Library -Port the :mod:`_thread` extension module to the multiphase initialization +Port the :mod:`!_thread` extension module to the multiphase initialization API (:pep:`489`) and convert its static types to heap types. .. @@ -960,8 +960,8 @@ explicitly and so not exported. .. nonce: Je08Ny .. section: C API -Remove the private :c:func:`_Py_fopen` function which is no longer needed. -Use :c:func:`_Py_wfopen` or :c:func:`_Py_fopen_obj` instead. Patch by Victor +Remove the private :c:func:`!_Py_fopen` function which is no longer needed. +Use :c:func:`!_Py_wfopen` or :c:func:`!_Py_fopen_obj` instead. Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.10.0a5.rst b/Misc/NEWS.d/3.10.0a5.rst index dc95e8ce072fd9..a85ea1ff1c2817 100644 --- a/Misc/NEWS.d/3.10.0a5.rst +++ b/Misc/NEWS.d/3.10.0a5.rst @@ -108,7 +108,7 @@ a slice at the start of the ``bytearray`` to a shorter byte string). .. nonce: WfTdfg .. section: Core and Builtins -Fix the :c:func:`_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when +Fix the :c:func:`!_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when :c:func:`Py_Initialize` / :c:func:`Py_Finalize` is called multiple times: preserve ``_PyRuntime.unicode_ids.next_index`` value. diff --git a/Misc/NEWS.d/3.10.0a6.rst b/Misc/NEWS.d/3.10.0a6.rst index bad3528084897b..31b7df2c61158e 100644 --- a/Misc/NEWS.d/3.10.0a6.rst +++ b/Misc/NEWS.d/3.10.0a6.rst @@ -315,7 +315,7 @@ Adds :const:`resource.RLIMIT_KQUEUES` constant from FreeBSD to the .. section: Library Make the pure Python implementation of :mod:`xml.etree.ElementTree` behave -the same as the C implementation (:mod:`_elementree`) regarding default +the same as the C implementation (:mod:`!_elementree`) regarding default attribute values (by not setting ``specified_attributes=1``). .. diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst index fe6213d95a88bb..32ee34d9a68910 100644 --- a/Misc/NEWS.d/3.10.0a7.rst +++ b/Misc/NEWS.d/3.10.0a7.rst @@ -83,7 +83,7 @@ instruction dispatch a bit. .. nonce: PhaT-B .. section: Core and Builtins -Fix reference leak in the :mod:`_hashopenssl` extension. Patch by Pablo +Fix reference leak in the :mod:`!_hashopenssl` extension. Patch by Pablo Galindo. .. diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index 640f3ee58adbae..306e987a41612e 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -182,7 +182,7 @@ normally be possible, but might occur in some unusual circumstances. .. nonce: u5Y6bS .. section: Core and Builtins -Importing the :mod:`_signal` module in a subinterpreter has no longer side +Importing the :mod:`!_signal` module in a subinterpreter has no longer side effects. .. @@ -776,11 +776,11 @@ builtins.open() is now io.open(). .. nonce: o1zEk_ .. section: Library -The Python :func:`_pyio.open` function becomes a static method to behave as +The Python :func:`!_pyio.open` function becomes a static method to behave as :func:`io.open` built-in function: don't become a bound method when stored as a class variable. It becomes possible since static methods are now -callable in Python 3.10. Moreover, :func:`_pyio.OpenWrapper` becomes a -simple alias to :func:`_pyio.open`. Patch by Victor Stinner. +callable in Python 3.10. Moreover, :func:`!_pyio.OpenWrapper` becomes a +simple alias to :func:`!_pyio.open`. Patch by Victor Stinner. .. 
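(Editorial aside, not part of the patch.) The `Lib/test/test_ast.py` additions earlier in this diff exercise `copy.replace()` on AST nodes through the `__replace__` protocol. A minimal sketch of the behaviour those tests check, assuming an interpreter that ships this AST support; the snippet is illustrative only and not taken from the patch itself:

```python
import ast
import copy

# Parse a trivial expression and grab its Name node.
node = ast.parse("x").body[0].value
assert isinstance(node, ast.Name) and node.id == "x"

# copy.replace() delegates to Name.__replace__ and returns a new node with
# only the requested field changed; the original node is left untouched.
repl = copy.replace(node, id="y")
assert node.id == "x"        # no side effect on the original
assert repl.id == "y"        # the requested change
assert repl.ctx is node.ctx  # unchanged fields are shared, not copied

# Unknown keyword arguments are rejected, mirroring
# test_replace_reject_unknown_instance_fields above.
try:
    copy.replace(node, unknown=1)
except TypeError as exc:
    print(exc)  # Name.__replace__ got an unexpected keyword argument 'unknown'.
```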
diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index 40fbb9d42b7944..23b13c058f96bd 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -613,7 +613,7 @@ Rename ``types.Union`` to ``types.UnionType``. .. section: Core and Builtins Expose specialization stats in python via -:func:`_opcode.get_specialization_stats`. +:func:`!_opcode.get_specialization_stats`. .. @@ -1701,7 +1701,7 @@ Remove many old deprecated :mod:`unittest` features: .. nonce: y1kEfP .. section: Library -Remove the deprecated ``split()`` method of :class:`_tkinter.TkappType`. +Remove the deprecated ``split()`` method of :class:`!_tkinter.TkappType`. Patch by Erlend E. Aasland. .. @@ -2298,9 +2298,9 @@ Adopt *binacii.a2b_base64*'s strict mode in *base64.b64decode*. .. nonce: ThuDMI .. section: Library -Fixed a bug in the :mod:`_ssl` module that was throwing :exc:`OverflowError` -when using :meth:`_ssl._SSLSocket.write` and :meth:`_ssl._SSLSocket.read` -for a big value of the ``len`` parameter. Patch by Pablo Galindo +Fixed a bug in the :mod:`!_ssl` module that was throwing :exc:`OverflowError` +when using :meth:`!_ssl._SSLSocket.write` and :meth:`!_ssl._SSLSocket.read` +for a big value of the ``len`` parameter. Patch by Pablo Galindo. .. @@ -2398,7 +2398,7 @@ class in the interactive session. Instead of :exc:`TypeError`, it should be .. nonce: R3IcM1 .. section: Library -Fix memory leak in :func:`_tkinter._flatten` if it is called with a sequence +Fix memory leak in :func:`!_tkinter._flatten` if it is called with a sequence or set, but not list or tuple. .. @@ -4187,7 +4187,7 @@ Add calls of :func:`gc.collect` in tests to support PyPy. .. nonce: mQZdXU .. section: Tests -Made tests relying on the :mod:`_asyncio` C extension module optional to +Made tests relying on the :mod:`!_asyncio` C extension module optional to allow running on alternative Python implementations. Patch by Serhiy Storchaka. diff --git a/Misc/NEWS.d/3.11.0a2.rst b/Misc/NEWS.d/3.11.0a2.rst index 05644d0a4639b1..48cf2c1e428d87 100644 --- a/Misc/NEWS.d/3.11.0a2.rst +++ b/Misc/NEWS.d/3.11.0a2.rst @@ -15,7 +15,7 @@ Improve the :exc:`SyntaxError` message when using ``True``, ``None`` or .. section: Core and Builtins :data:`sys.stdlib_module_names` now contains the macOS-specific module -:mod:`_scproxy`. +:mod:`!_scproxy`. .. @@ -1023,7 +1023,7 @@ compile shared modules. .. nonce: 61gM2A .. section: Build -:mod:`pyexpat` and :mod:`_elementtree` no longer define obsolete macros +:mod:`pyexpat` and :mod:`!_elementtree` no longer define obsolete macros ``HAVE_EXPAT_CONFIG_H`` and ``USE_PYEXPAT_CAPI``. ``XML_POOR_ENTROPY`` is now defined in ``expat_config.h``. diff --git a/Misc/NEWS.d/3.11.0a3.rst b/Misc/NEWS.d/3.11.0a3.rst index 2842aad0e163d6..6a0ae20d1fb5ed 100644 --- a/Misc/NEWS.d/3.11.0a3.rst +++ b/Misc/NEWS.d/3.11.0a3.rst @@ -27,7 +27,7 @@ invalid targets. Patch by Pablo Galindo .. nonce: 3TmTSw .. section: Core and Builtins -:c:func:`_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including +:c:func:`!_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including setting the traceback on the exception instance) because ``exc_info`` is always normalized. diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index a5ce7620016cc7..64e2f39ad9db18 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -258,7 +258,7 @@ instruction which performs the same operation, but without the loop. .. nonce: ADVaPT .. 
section: Core and Builtins -The code called from :c:func:`_PyErr_Display` was refactored to improve +The code called from :c:func:`!_PyErr_Display` was refactored to improve error handling. It now exits immediately upon an unrecoverable error. .. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index c35e8e2c1caf07..a035d0f5addbf2 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -285,7 +285,7 @@ macros. .. nonce: 11YXHQ .. section: Core and Builtins -Add a new :c:func:`_PyFrame_IsEntryFrame` API function, to check if a +Add a new :c:func:`!_PyFrame_IsEntryFrame` API function, to check if a :c:type:`PyFrameObject` is an entry frame. Patch by Pablo Galindo. .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index 84d9d4e017609d..77a34124fb39e6 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -102,7 +102,7 @@ well as generator expressions. .. section: Core and Builtins Added unicode check for ``name`` attribute of ``spec`` argument passed in -:func:`_imp.create_builtin` function. +:func:`!_imp.create_builtin` function. .. @@ -483,7 +483,7 @@ Fix case of undefined behavior in ceval.c .. nonce: AfCi36 .. section: Core and Builtins -Convert :mod:`_functools` to argument clinic. +Convert :mod:`!_functools` to argument clinic. .. @@ -492,7 +492,7 @@ Convert :mod:`_functools` to argument clinic. .. nonce: wky0Fc .. section: Core and Builtins -Do not expose ``KeyWrapper`` in :mod:`_functools`. +Do not expose ``KeyWrapper`` in :mod:`!_functools`. .. @@ -1731,7 +1731,7 @@ tracing functions implemented in C. .. nonce: lenv9h .. section: Core and Builtins -:meth:`_warnings.warn_explicit` is ported to Argument Clinic. +:meth:`!_warnings.warn_explicit` is ported to Argument Clinic. .. @@ -3142,8 +3142,8 @@ test.test_codecs.EncodedFileTest`` instead. .. nonce: VhS1eS .. section: Library -Made :class:`_struct.Struct` GC-tracked in order to fix a reference leak in -the :mod:`_struct` module. +Made :class:`!_struct.Struct` GC-tracked in order to fix a reference leak in +the :mod:`!_struct` module. .. @@ -3258,7 +3258,7 @@ on the main thread Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) -function is a built-in function. Since Python 3.10, :func:`_pyio.open` is +function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is also a static method. Patch by Victor Stinner. .. @@ -5610,7 +5610,7 @@ Accept os.PathLike for the argument to winsound.PlaySound Support native Windows case-insensitive path comparisons by using ``LCMapStringEx`` instead of :func:`str.lower` in :func:`ntpath.normcase`. -Add ``LCMapStringEx`` to the :mod:`_winapi` module. +Add ``LCMapStringEx`` to the :mod:`!_winapi` module. .. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index 88d84ad93b35b5..3626f8b1e20809 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -527,7 +527,7 @@ Stinner. .. nonce: Ai2KDh .. section: Library -Now :mod:`_pyio` is consistent with :mod:`_io` in raising ``ValueError`` +Now :mod:`!_pyio` is consistent with :mod:`!_io` in raising ``ValueError`` when executing methods over closed buffers. .. @@ -537,7 +537,7 @@ when executing methods over closed buffers. .. nonce: 0v8iyw .. section: Library -Clean up refleak on failed module initialisation in :mod:`_zoneinfo` +Clean up refleak on failed module initialisation in :mod:`!_zoneinfo` .. 
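A small sketch of the ``_pyio``/``_io`` consistency described above, assuming only the behaviour stated in that entry::

    import io
    import _pyio

    for BytesIO in (io.BytesIO, _pyio.BytesIO):
        buf = BytesIO(b"data")
        buf.close()
        try:
            buf.read()
        except ValueError:
            pass  # both implementations now raise ValueError on closed buffers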
@@ -546,7 +546,7 @@ Clean up refleak on failed module initialisation in :mod:`_zoneinfo` .. nonce: qc_KHr .. section: Library -Clean up refleaks on failed module initialisation in :mod:`_pickle` +Clean up refleaks on failed module initialisation in :mod:`!_pickle` .. @@ -555,7 +555,7 @@ Clean up refleaks on failed module initialisation in :mod:`_pickle` .. nonce: LBl79O .. section: Library -Clean up refleak on failed module initialisation in :mod:`_io`. +Clean up refleak on failed module initialisation in :mod:`!_io`. .. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst index 07593998d80891..f6a4dc75d456f4 100644 --- a/Misc/NEWS.d/3.12.0a3.rst +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -70,7 +70,7 @@ Fix bug where compiler crashes on an if expression with an empty body block. .. nonce: DcKoBJ .. section: Core and Builtins -Fix a reference bug in :func:`_imp.create_builtin()` after the creation of +Fix a reference bug in :func:`!_imp.create_builtin` after the creation of the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index d7af30f6c09b2b..53e1688b802bae 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -241,7 +241,7 @@ are now always dumped, even if switched off. Improve ``BUILD_LIST`` opcode so that it works similarly to the ``BUILD_TUPLE`` opcode, by stealing references from the stack rather than repeatedly using stack operations to set list elements. Implementation -details are in a new private API :c:func:`_PyList_FromArraySteal`. +details are in a new private API :c:func:`!_PyList_FromArraySteal`. .. diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst index 9f3095b224233e..7126e08a20c7fd 100644 --- a/Misc/NEWS.d/3.12.0b1.rst +++ b/Misc/NEWS.d/3.12.0b1.rst @@ -1828,7 +1828,7 @@ is relative. .. nonce: 511Tbh .. section: Library -Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic. +Convert private :meth:`!_posixsubprocess.fork_exec` to use Argument Clinic. .. diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst index 9a321f779c24ff..0ba61b43411792 100644 --- a/Misc/NEWS.d/3.13.0a1.rst +++ b/Misc/NEWS.d/3.13.0a1.rst @@ -2888,9 +2888,9 @@ documented and were not intended to be used externally. .. nonce: vMbmj_ .. section: Library -:data:`opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never +:data:`!opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never documented or intended for external usage) is moved to -:data:`_opcode.ENABLE_SPECIALIZATION` where tests can access it. +:data:`!_opcode.ENABLE_SPECIALIZATION` where tests can access it. .. @@ -3053,7 +3053,7 @@ Donghee Na. .. nonce: U9nD_B .. section: Library -Optimize :meth:`_PollLikeSelector.select` for many iteration case. +Optimize :meth:`!_PollLikeSelector.select` for many iteration case. .. @@ -3173,7 +3173,7 @@ Disable tab completion in multiline mode of :mod:`pdb` .. nonce: pYSwMj .. section: Library -Expose opcode metadata through :mod:`_opcode`. +Expose opcode metadata through :mod:`!_opcode`. .. @@ -3735,7 +3735,7 @@ overwritten. .. nonce: _sZilh .. section: Library -Fix bugs in :mod:`_ctypes` where exceptions could end up being overwritten. +Fix bugs in :mod:`!_ctypes` where exceptions could end up being overwritten. .. 
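A short sketch of the introspection that the ``_opcode`` entries above enable; the helper names are taken from the ``Modules/_opcode.c`` changes later in this patch::

    import opcode
    import _opcode  # private module, intended for tests and the dis/opcode machinery

    print(_opcode.ENABLE_SPECIALIZATION)      # moved here from the opcode module
    load_const = opcode.opmap["LOAD_CONST"]
    print(_opcode.is_valid(load_const))       # True
    print(_opcode.has_arg(load_const))        # True
    print(_opcode.has_const(load_const))      # True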
diff --git a/Misc/NEWS.d/3.13.0a2.rst b/Misc/NEWS.d/3.13.0a2.rst index c6b2b1b263ffab..f4a637bf624d03 100644 --- a/Misc/NEWS.d/3.13.0a2.rst +++ b/Misc/NEWS.d/3.13.0a2.rst @@ -777,7 +777,7 @@ Add error checking during :mod:`!_socket` module init. .. nonce: urFYtn .. section: Library -Fix :mod:`_blake2` not checking for errors when initializing. +Fix :mod:`!_blake2` not checking for errors when initializing. .. diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst index 2c660192dcd5b3..29fbe00efef76d 100644 --- a/Misc/NEWS.d/3.13.0a3.rst +++ b/Misc/NEWS.d/3.13.0a3.rst @@ -449,8 +449,8 @@ well-formed for surrogateescape encoding. Patch by Sidney Markowitz. .. nonce: N8E1zw .. section: Core and Builtins -Use the object's actual class name in :meth:`_io.FileIO.__repr__`, -:meth:`_io._WindowsConsoleIO` and :meth:`_io.TextIOWrapper.__repr__`, to +Use the object's actual class name in :meth:`!_io.FileIO.__repr__`, +:meth:`!_io._WindowsConsoleIO` and :meth:`!_io.TextIOWrapper.__repr__`, to make these methods subclass friendly. .. diff --git a/Misc/NEWS.d/3.13.0a5.rst b/Misc/NEWS.d/3.13.0a5.rst index 6d74c6bc5c4d55..d8cc88c8756a17 100644 --- a/Misc/NEWS.d/3.13.0a5.rst +++ b/Misc/NEWS.d/3.13.0a5.rst @@ -541,7 +541,7 @@ descriptors in :meth:`inspect.Signature.from_callable`. .. nonce: sGMKr0 .. section: Library -Isolate :mod:`_lsprof` (apply :pep:`687`). +Isolate :mod:`!_lsprof` (apply :pep:`687`). .. @@ -773,8 +773,8 @@ combination with unicode encoding. .. section: Library Fix :func:`io.BufferedReader.tell`, :func:`io.BufferedReader.seek`, -:func:`_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, -:func:`io.BufferedRandom.seek` and :func:`_pyio.BufferedRandom.tell` being +:func:`!_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, +:func:`io.BufferedRandom.seek` and :func:`!_pyio.BufferedRandom.tell` being able to return negative offsets. .. diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst index fff29083e0dab7..0cdbb8232250d7 100644 --- a/Misc/NEWS.d/3.13.0a6.rst +++ b/Misc/NEWS.d/3.13.0a6.rst @@ -550,7 +550,7 @@ or DuplicateOptionError. .. nonce: PBiRQB .. section: Library -:class:`_io.WindowsConsoleIO` now emit a warning if a boolean value is +:class:`!_io.WindowsConsoleIO` now emit a warning if a boolean value is passed as a filedescriptor argument. .. diff --git a/Misc/NEWS.d/3.13.0b1.rst b/Misc/NEWS.d/3.13.0b1.rst index ab5f24fe345af9..831ba623765df7 100644 --- a/Misc/NEWS.d/3.13.0b1.rst +++ b/Misc/NEWS.d/3.13.0b1.rst @@ -666,7 +666,7 @@ by :pep:`738`. .. section: Library Allow to specify the signature of custom callable instances of extension -type by the :attr:`__text_signature__` attribute. Specify signatures of +type by the ``__text_signature__`` attribute. Specify signatures of :class:`operator.attrgetter`, :class:`operator.itemgetter`, and :class:`operator.methodcaller` instances. @@ -687,10 +687,10 @@ padding is not detected when no padding is necessary. .. nonce: 5N2Xcy .. section: Library -Add the :class:`!PhotoImage` methods :meth:`~tkinter.PhotoImage.read` to -read an image from a file and :meth:`~tkinter.PhotoImage.data` to get the +Add the :class:`!PhotoImage` methods :meth:`!read` to +read an image from a file and :meth:`!data` to get the image data. Add *background* and *grayscale* parameters to -:class:`!PhotoImage` method :meth:`~tkinter.PhotoImage.write`. +:class:`!PhotoImage` method :meth:`!write`. .. @@ -855,7 +855,7 @@ is used to bind indexed, nameless placeholders. See also :gh:`100668`. .. nonce: RstWg- .. 
section: Library -Fix TypeError in :func:`email.Message.get_payload` when the charset is +Fix TypeError in :func:`email.message.Message.get_payload` when the charset is :rfc:`2231` encoded. .. @@ -953,7 +953,7 @@ Speed up :meth:`pathlib.Path.walk` by working with strings internally. .. nonce: oxIUEI .. section: Library -Change the new multi-separator support in :meth:`asyncio.Stream.readuntil` +Change the new multi-separator support in :meth:`asyncio.StreamReader.readuntil` to only accept tuples of separators rather than arbitrary iterables. .. @@ -1260,7 +1260,7 @@ Support opcode events in :mod:`bdb` .. nonce: YoI8TV .. section: Library -:mod:`ncurses`: fixed a crash that could occur on macOS 13 or earlier when +:mod:`!ncurses`: fixed a crash that could occur on macOS 13 or earlier when Python was built with Apple Xcode 15's SDK. .. @@ -1347,13 +1347,13 @@ urllib. .. nonce: du4UKW .. section: Library -Setting the :mod:`!tkinter` module global :data:`~tkinter.wantobject` to ``2`` +Setting the :mod:`!tkinter` module global :data:`!wantobjects` to ``2`` before creating the :class:`~tkinter.Tk` object or call the -:meth:`~tkinter.Tk.wantobject` method of the :class:`!Tk` object with argument +:meth:`!wantobjects` method of the :class:`!Tk` object with argument ``2`` makes now arguments to callbacks registered in the :mod:`tkinter` module to be passed as various Python objects (``int``, ``float``, ``bytes``, ``tuple``), depending on their internal representation in Tcl, instead of always ``str``. -:data:`!tkinter.wantobject` is now set to ``2`` by default. +:data:`!tkinter.wantobjects` is now set to ``2`` by default. .. diff --git a/Misc/NEWS.d/3.5.0a1.rst b/Misc/NEWS.d/3.5.0a1.rst index 442ab62fee8185..35f340f503df18 100644 --- a/Misc/NEWS.d/3.5.0a1.rst +++ b/Misc/NEWS.d/3.5.0a1.rst @@ -3447,7 +3447,8 @@ tkinter.ttk now works when default root window is not set. .. nonce: FE_PII .. section: Library -_tkinter.create() now creates tkapp object with wantobject=1 by default. +``_tkinter.create()`` now creates ``tkapp`` object with ``wantobjects=1`` by +default. .. diff --git a/Misc/NEWS.d/3.6.0a1.rst b/Misc/NEWS.d/3.6.0a1.rst index 5c9a6e5d64b469..803c9fc5925fa6 100644 --- a/Misc/NEWS.d/3.6.0a1.rst +++ b/Misc/NEWS.d/3.6.0a1.rst @@ -1484,9 +1484,9 @@ on UNIX signals (SIGSEGV, SIGFPE, SIGABRT). .. nonce: RWN1jR .. section: Library -Add C functions :c:func:`_PyTraceMalloc_Track` and -:c:func:`_PyTraceMalloc_Untrack` to track memory blocks using the -:mod:`tracemalloc` module. Add :c:func:`_PyTraceMalloc_GetTraceback` to get +Add C functions :c:func:`!_PyTraceMalloc_Track` and +:c:func:`!_PyTraceMalloc_Untrack` to track memory blocks using the +:mod:`tracemalloc` module. Add :c:func:`!_PyTraceMalloc_GetTraceback` to get the traceback of an object. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 9decc4034d6b87..35b9e7fca27a7b 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -2519,7 +2519,7 @@ non-Windows systems. .. nonce: dQS1ng .. section: Library -Fix incorrect parsing of :class:`_io.IncrementalNewlineDecoder`'s +Fix incorrect parsing of :class:`io.IncrementalNewlineDecoder`'s *translate* argument. .. @@ -8051,7 +8051,7 @@ Update macOS 10.9+ installer to Tcl/Tk 8.6.8. .. nonce: K6jCVG .. section: macOS -In :mod:`_scproxy`, drop the GIL when calling into ``SystemConfiguration`` +In :mod:`!_scproxy`, drop the GIL when calling into ``SystemConfiguration`` to avoid deadlocks. .. 
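A minimal sketch of the tuple-of-separators form of :meth:`asyncio.StreamReader.readuntil` mentioned above::

    import asyncio

    async def read_line(reader: asyncio.StreamReader) -> bytes:
        # A tuple of candidate separators is accepted; the call returns the
        # data up to and including whichever separator occurs first.
        return await reader.readuntil((b"\r\n", b"\n"))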
diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst index 7bf0de1210935b..edce71b2555a89 100644 --- a/Misc/NEWS.d/3.8.0a4.rst +++ b/Misc/NEWS.d/3.8.0a4.rst @@ -945,7 +945,7 @@ P. Hemsley. .. nonce: __FTq9 .. section: Tests -Add a new :mod:`_testinternalcapi` module to test the internal C API. +Add a new :mod:`!_testinternalcapi` module to test the internal C API. .. @@ -1383,7 +1383,7 @@ Since Python 3.7.0, calling :c:func:`Py_DecodeLocale` before coerced and/or if the UTF-8 Mode is enabled by the user configuration. The LC_CTYPE coercion and UTF-8 Mode are now disabled by default to fix the mojibake issue. They must now be enabled explicitly (opt-in) using the new -:c:func:`_Py_PreInitialize` API with ``_PyPreConfig``. +:c:func:`!_Py_PreInitialize` API with ``_PyPreConfig``. .. diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst index 4174ab8fac6192..fc4e3a9bd887fb 100644 --- a/Misc/NEWS.d/3.8.0b1.rst +++ b/Misc/NEWS.d/3.8.0b1.rst @@ -600,7 +600,7 @@ default. .. nonce: sLULGQ .. section: Library -Fix destructor :class:`_pyio.BytesIO` and :class:`_pyio.TextIOWrapper`: +Fix destructor :class:`!_pyio.BytesIO` and :class:`!_pyio.TextIOWrapper`: initialize their ``_buffer`` attribute as soon as possible (in the class body), because it's used by ``__del__()`` which calls ``close()``. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index a38b93e4b76d17..b0f63c3b9c3537 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -1384,7 +1384,7 @@ Nested subclasses of :class:`typing.NamedTuple` are now pickleable. .. nonce: hwrPN7 .. section: Library -Prevent :exc:`KeyError` thrown by :func:`_encoded_words.decode` when given +Prevent :exc:`KeyError` thrown by :func:`!_encoded_words.decode` when given an encoded-word with invalid content-type encoding from propagating all the way to :func:`email.message.get`. @@ -1395,7 +1395,7 @@ way to :func:`email.message.get`. .. nonce: S6Klvm .. section: Library -Deprecated the ``split()`` method in :class:`_tkinter.TkappType` in favour +Deprecated the ``split()`` method in :class:`!_tkinter.TkappType` in favour of the ``splitlist()`` method which has more consistent and predictable behavior. @@ -3013,7 +3013,7 @@ thread was still running. .. section: Library Allow pure Python implementation of :mod:`pickle` to work even when the C -:mod:`_pickle` module is unavailable. +:mod:`!_pickle` module is unavailable. .. @@ -3064,8 +3064,8 @@ internal tasks weak set is changed by another thread during iteration. .. nonce: ADqCkq .. section: Library -:class:`_pyio.IOBase` destructor now does nothing if getting the ``closed`` -attribute fails to better mimic :class:`_io.IOBase` finalizer. +:class:`!_pyio.IOBase` destructor now does nothing if getting the ``closed`` +attribute fails to better mimic :class:`!_io.IOBase` finalizer. .. @@ -4993,7 +4993,7 @@ Make :const:`winreg.REG_MULTI_SZ` support zero-length strings. .. section: Windows Replace use of :c:func:`strcasecmp` for the system function -:c:func:`_stricmp`. Patch by Minmin Gong. +:c:func:`!_stricmp`. Patch by Minmin Gong. .. @@ -5696,8 +5696,8 @@ Add :c:func:`PyConfig_SetWideStringList` function. .. section: C API Add fast functions for calling methods: -:c:func:`_PyObject_VectorcallMethod`, :c:func:`_PyObject_CallMethodNoArgs` -and :c:func:`_PyObject_CallMethodOneArg`. +:c:func:`!_PyObject_VectorcallMethod`, :c:func:`!_PyObject_CallMethodNoArgs` +and :c:func:`!_PyObject_CallMethodOneArg`. .. 
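A rough way to exercise the pure Python :mod:`pickle` code path described above; the ``import_fresh_module`` helper is assumed to be available from ``test.support``::

    from test.support.import_helper import import_fresh_module

    # Import pickle with the _pickle accelerator blocked; the pure Python
    # implementation must work on its own.
    py_pickle = import_fresh_module("pickle", blocked=["_pickle"])
    data = py_pickle.dumps({"answer": 42})
    assert py_pickle.loads(data) == {"answer": 42}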
diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index b7ea1051c314f2..4ba4cfe818c2d0 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -111,7 +111,7 @@ str.decode(). .. nonce: m15TTX .. section: Core and Builtins -Fix possible refleaks in :mod:`_json`, memo of PyScannerObject should be +Fix possible refleaks in :mod:`!_json`, memo of PyScannerObject should be traversed. .. @@ -666,8 +666,8 @@ for _main_thread, instead of a _DummyThread instance. .. nonce: VTq_8s .. section: Library -Add a private ``_at_fork_reinit()`` method to :class:`_thread.Lock`, -:class:`_thread.RLock`, :class:`threading.RLock` and +Add a private ``_at_fork_reinit()`` method to :class:`!_thread.Lock`, +:class:`!_thread.RLock`, :class:`threading.RLock` and :class:`threading.Condition` classes: reinitialize the lock at fork in the child process, reset the lock to the unlocked state. Rename also the private ``_reset_internal_locks()`` method of :class:`threading.Event` to diff --git a/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst b/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst new file mode 100644 index 00000000000000..a2238475546eaa --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst @@ -0,0 +1 @@ +Fix a Makefile bug that prevented mimalloc header files from being installed. diff --git a/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst b/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst new file mode 100644 index 00000000000000..e30d4dcdbfe779 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst @@ -0,0 +1 @@ +Fix deprecation warning for ATOMIC_VAR_INIT in mimalloc. diff --git a/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst b/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst new file mode 100644 index 00000000000000..a240b4e852c4d1 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst @@ -0,0 +1,3 @@ +Restore the private ``_PyArg_Parser`` structure and the private +``_PyArg_ParseTupleAndKeywordsFast()`` function, previously removed in Python +3.13 alpha 1. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst b/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst new file mode 100644 index 00000000000000..b82e78446e4e87 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst @@ -0,0 +1,3 @@ +Export the :c:func:`PySignal_SetWakeupFd` function. Previously, the function +was documented but it couldn't be used in 3rd party code. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- b/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- deleted file mode 100644 index 29f06d43c3598c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- +++ /dev/null @@ -1 +0,0 @@ -Support Linux perf profiler to see Python calls on RISC-V architecture diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst deleted file mode 100644 index 8c86d4750e39a8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst +++ /dev/null @@ -1 +0,0 @@ -Support Linux perf profiler to see Python calls on RISC-V architecture. 
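For context on the perf-profiler entries being removed above, a hedged sketch of how the trampoline is enabled from Python (availability depends on the platform and build)::

    import sys

    # Available since Python 3.12 on supported platforms; lets Linux perf
    # attribute samples to Python-level calls.
    if hasattr(sys, "activate_stack_trampoline"):
        sys.activate_stack_trampoline("perf")
        # ... run the workload being profiled ...
        sys.deactivate_stack_trampoline()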
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst
new file mode 100644
index 00000000000000..46481d8f31aaba
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst
@@ -0,0 +1 @@
+Reduce the number of system calls invoked when reading a whole file (e.g. ``open('a.txt').read()``). For a sample program that reads the contents of the 400+ ``.rst`` files in the CPython repository ``Doc`` folder, the system call count is reduced by over 10%.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst
new file mode 100644
index 00000000000000..38d618f06090fd
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst
@@ -0,0 +1,2 @@
+Added a specialization for summation of complex numbers; this also improves the
+accuracy of the builtin :func:`sum` for such inputs. Patch by Sergey B Kirpichev.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst
new file mode 100644
index 00000000000000..3df5b216cbc0af
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst
@@ -0,0 +1,3 @@
+Fix a race condition in ``_PyType_Lookup`` in the free-threaded build caused by
+a missing memory fence. This could lead to ``_PyType_Lookup`` returning
+incorrect results on arm64.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst
new file mode 100644
index 00000000000000..bd3e20b5658562
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst
@@ -0,0 +1,5 @@
+:exc:`ValueError` messages for :meth:`!list.index()`, :meth:`!range.index()`,
+:meth:`!deque.index()`, :meth:`!deque.remove()` and
+:meth:`!ShareableList.index()` no longer contain the repr of the searched
+value (which can be arbitrarily large) and are consistent with the error
+messages of other :meth:`!index()` and :meth:`!remove()` methods.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst
new file mode 100644
index 00000000000000..361f9fc71186c6
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst
@@ -0,0 +1 @@
+Allow tuples of length 20 in the freelist to be reused.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst
new file mode 100644
index 00000000000000..7b04eb68b03752
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst
@@ -0,0 +1,2 @@
+Tier 2 execution now ensures that list iterators remain exhausted once they
+become exhausted.
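An illustration of the new value-free error messages; the exact strings come from the ``Objects/listobject.c``, ``Objects/rangeobject.c`` and ``Modules/_collectionsmodule.c`` hunks later in this patch::

    try:
        [1, 2, 3].index(object())
    except ValueError as exc:
        # The searched value's repr is no longer embedded in the message.
        print(exc)  # list.index(x): x not in list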
diff --git a/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst b/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst index 3f70168b81069e..bc9187309c6a53 100644 --- a/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst +++ b/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst @@ -1,4 +1,4 @@ Prepare Tkinter for C API changes in Tcl 8.7/9.0 to avoid -:class:`_tkinter.Tcl_Obj` being unexpectedly returned +:class:`!_tkinter.Tcl_Obj` being unexpectedly returned instead of :class:`bool`, :class:`str`, :class:`bytearray`, or :class:`int`. diff --git a/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst b/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst new file mode 100644 index 00000000000000..f41baee303482a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst @@ -0,0 +1,2 @@ +Allow ``restart`` in post-mortem debugging of :mod:`pdb`. Removed restart message +when the user quits pdb from post-mortem mode. diff --git a/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst b/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst index de1462f0d24fce..67b1fea4f83cb4 100644 --- a/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst +++ b/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst @@ -1 +1,2 @@ +Fix :func:`os.path.isfile` on Windows for pipes. Speedup :func:`os.path.isjunction` and :func:`os.path.lexists` on Windows with a native implementation. diff --git a/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst b/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst new file mode 100644 index 00000000000000..cca97f50a20496 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst @@ -0,0 +1,2 @@ +Defer the ``threading`` import in ``importlib.util`` until lazy loading is +used. diff --git a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst b/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst index eac5bab3e9fe6d..346a89879cad41 100644 --- a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst +++ b/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst @@ -1,3 +1,3 @@ -Fixed issues where :meth:`!argparse.ArgumentParser.parses_args` did not honor +Fixed issues where :meth:`!argparse.ArgumentParser.parse_args` did not honor ``exit_on_error=False``. Based on patch by Ben Hsing. diff --git a/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst b/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst new file mode 100644 index 00000000000000..f2dc621050ff4b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst @@ -0,0 +1 @@ +Add support for :func:`copy.replace` to AST nodes. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst b/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst index 029838030278a6..50f945ab9f1436 100644 --- a/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst +++ b/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst @@ -1,3 +1,2 @@ Add support for ``all`` as an valid ``action`` for :func:`warnings.simplefilter` -and :func:`warnings.filterswarnings`. - +and :func:`warnings.filterwarnings`. 
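A brief sketch of the new :func:`copy.replace` support for AST nodes; the underlying ``__replace__`` method is implemented in the ``Parser/asdl_c.py`` changes below::

    import ast
    import copy

    node = ast.parse("x = 1").body[0]                # an ast.Assign node
    new = copy.replace(node, value=ast.Constant(2))  # shallow copy, one field swapped
    print(ast.dump(new))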
diff --git a/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst b/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst
new file mode 100644
index 00000000000000..b8e3ee0720cfe6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst
@@ -0,0 +1,2 @@
+:mod:`zipimport` now supports namespace packages when no directory entry
+exists.
diff --git a/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst b/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst
new file mode 100644
index 00000000000000..5d9b50d452a9cd
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst
@@ -0,0 +1 @@
+Slightly speed up :func:`os.walk` by simplifying exception handling.
diff --git a/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst b/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst
new file mode 100644
index 00000000000000..4a65fb737f025b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst
@@ -0,0 +1,4 @@
+Hard-coded breakpoints (:func:`breakpoint` and :func:`pdb.set_trace`) now
+reuse the most recent ``Pdb`` instance that calls ``Pdb.set_trace()``,
+instead of creating a new one each time. As a result, all instance-specific
+data such as ``display`` and ``commands`` are preserved across hard-coded breakpoints.
diff --git a/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst b/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst
new file mode 100644
index 00000000000000..b91ea8acfadbf1
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst
@@ -0,0 +1,3 @@
+Fix test_typing random leaks. Clear typing ABC caches when running tests for
+refleaks (``-R`` option): call ``_abc_caches_clear()`` on typing abstract
+classes and their subclasses. Patch by Victor Stinner.
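A hedged sketch of the hard-coded breakpoint behaviour described above (running it drops into the interactive debugger)::

    def step_one():
        x = 1
        breakpoint()   # creates (or reuses) the most recent Pdb instance
        return x

    def step_two(x):
        breakpoint()   # reuses the same instance, so `display` expressions and
        return x + 1   # user-defined `commands` set earlier still apply

    step_two(step_one())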
diff --git a/Modules/Setup.bootstrap.in b/Modules/Setup.bootstrap.in index aa4e60e272653b..4dcc0f55176d0e 100644 --- a/Modules/Setup.bootstrap.in +++ b/Modules/Setup.bootstrap.in @@ -30,6 +30,7 @@ _weakref _weakref.c _abc _abc.c _functools _functoolsmodule.c _locale _localemodule.c +_opcode _opcode.c _operator _operator.c _stat _stat.c _symtable symtablemodule.c diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 78b979698fcd75..dfc75077650df8 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -36,7 +36,6 @@ @MODULE__HEAPQ_TRUE@_heapq _heapqmodule.c @MODULE__JSON_TRUE@_json _json.c @MODULE__LSPROF_TRUE@_lsprof _lsprof.c rotatingtree.c -@MODULE__OPCODE_TRUE@_opcode _opcode.c @MODULE__PICKLE_TRUE@_pickle _pickle.c @MODULE__QUEUE_TRUE@_queue _queuemodule.c @MODULE__RANDOM_TRUE@_random _randommodule.c diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 0bc61db4117c5d..fbfed59995c21e 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -1293,7 +1293,7 @@ deque_index_impl(dequeobject *deque, PyObject *v, Py_ssize_t start, index = 0; } } - PyErr_Format(PyExc_ValueError, "%R is not in deque", v); + PyErr_SetString(PyExc_ValueError, "deque.index(x): x not in deque"); return NULL; } @@ -1462,7 +1462,7 @@ deque_remove_impl(dequeobject *deque, PyObject *value) } } if (i == n) { - PyErr_Format(PyExc_ValueError, "%R is not in deque", value); + PyErr_SetString(PyExc_ValueError, "deque.remove(x): x not in deque"); return NULL; } rv = deque_del_item(deque, i); diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c index ff8dacf5bd1ad0..f0447475c49116 100644 --- a/Modules/_interpchannelsmodule.c +++ b/Modules/_interpchannelsmodule.c @@ -2615,10 +2615,10 @@ _get_current_channelend_type(int end) } if (cls == NULL) { // Force the module to be loaded, to register the type. 
- PyObject *highlevel = PyImport_ImportModule("interpreters.channel"); + PyObject *highlevel = PyImport_ImportModule("interpreters.channels"); if (highlevel == NULL) { PyErr_Clear(); - highlevel = PyImport_ImportModule("test.support.interpreters.channel"); + highlevel = PyImport_ImportModule("test.support.interpreters.channels"); if (highlevel == NULL) { return NULL; } @@ -2977,7 +2977,7 @@ channelsmod_send(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_doc, -"channel_send(cid, obj, blocking=True)\n\ +"channel_send(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's data to the channel's queue.\n\ By default this waits for the object to be received."); @@ -3027,7 +3027,7 @@ channelsmod_send_buffer(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_buffer_doc, -"channel_send_buffer(cid, obj, blocking=True)\n\ +"channel_send_buffer(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's buffer to the channel's queue.\n\ By default this waits for the object to be received."); diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index b5129ffcbffdcf..5d9d87d6118a75 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -54,6 +54,9 @@ # define SMALLCHUNK BUFSIZ #endif +/* Size at which a buffer is considered "large" and behavior should change to + avoid excessive memory allocation */ +#define LARGE_BUFFER_CUTOFF_SIZE 65536 /*[clinic input] module _io @@ -72,6 +75,7 @@ typedef struct { unsigned int closefd : 1; char finalizing; unsigned int blksize; + Py_off_t estimated_size; PyObject *weakreflist; PyObject *dict; } fileio; @@ -196,6 +200,7 @@ fileio_new(PyTypeObject *type, PyObject *args, PyObject *kwds) self->appending = 0; self->seekable = -1; self->blksize = 0; + self->estimated_size = -1; self->closefd = 1; self->weakreflist = NULL; } @@ -482,6 +487,9 @@ _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, if (fdfstat.st_blksize > 1) self->blksize = fdfstat.st_blksize; #endif /* HAVE_STRUCT_STAT_ST_BLKSIZE */ + if (fdfstat.st_size < PY_SSIZE_T_MAX) { + self->estimated_size = (Py_off_t)fdfstat.st_size; + } } #if defined(MS_WINDOWS) || defined(__CYGWIN__) @@ -684,7 +692,7 @@ new_buffersize(fileio *self, size_t currentsize) giving us amortized linear-time behavior. For bigger sizes, use a less-than-double growth factor to avoid excessive allocation. */ assert(currentsize <= PY_SSIZE_T_MAX); - if (currentsize > 65536) + if (currentsize > LARGE_BUFFER_CUTOFF_SIZE) addend = currentsize >> 3; else addend = 256 + currentsize; @@ -707,43 +715,56 @@ static PyObject * _io_FileIO_readall_impl(fileio *self) /*[clinic end generated code: output=faa0292b213b4022 input=dbdc137f55602834]*/ { - struct _Py_stat_struct status; Py_off_t pos, end; PyObject *result; Py_ssize_t bytes_read = 0; Py_ssize_t n; size_t bufsize; - int fstat_result; - if (self->fd < 0) + if (self->fd < 0) { return err_closed(); + } - Py_BEGIN_ALLOW_THREADS - _Py_BEGIN_SUPPRESS_IPH -#ifdef MS_WINDOWS - pos = _lseeki64(self->fd, 0L, SEEK_CUR); -#else - pos = lseek(self->fd, 0L, SEEK_CUR); -#endif - _Py_END_SUPPRESS_IPH - fstat_result = _Py_fstat_noraise(self->fd, &status); - Py_END_ALLOW_THREADS - - if (fstat_result == 0) - end = status.st_size; - else - end = (Py_off_t)-1; - - if (end > 0 && end >= pos && pos >= 0 && end - pos < PY_SSIZE_T_MAX) { + end = self->estimated_size; + if (end <= 0) { + /* Use a default size and resize as needed. 
*/ + bufsize = SMALLCHUNK; + } + else { /* This is probably a real file, so we try to allocate a buffer one byte larger than the rest of the file. If the calculation is right then we should get EOF without having to enlarge the buffer. */ - bufsize = (size_t)(end - pos + 1); - } else { - bufsize = SMALLCHUNK; + if (end > _PY_READ_MAX - 1) { + bufsize = _PY_READ_MAX; + } + else { + bufsize = (size_t)end + 1; + } + + /* While a lot of code does open().read() to get the whole contents + of a file it is possible a caller seeks/reads a ways into the file + then calls readall() to get the rest, which would result in allocating + more than required. Guard against that for larger files where we expect + the I/O time to dominate anyways while keeping small files fast. */ + if (bufsize > LARGE_BUFFER_CUTOFF_SIZE) { + Py_BEGIN_ALLOW_THREADS + _Py_BEGIN_SUPPRESS_IPH +#ifdef MS_WINDOWS + pos = _lseeki64(self->fd, 0L, SEEK_CUR); +#else + pos = lseek(self->fd, 0L, SEEK_CUR); +#endif + _Py_END_SUPPRESS_IPH + Py_END_ALLOW_THREADS + + if (end >= pos && pos >= 0 && (end - pos) < (_PY_READ_MAX - 1)) { + bufsize = (size_t)(end - pos) + 1; + } + } } + result = PyBytes_FromStringAndSize(NULL, bufsize); if (result == NULL) return NULL; @@ -783,7 +804,6 @@ _io_FileIO_readall_impl(fileio *self) return NULL; } bytes_read += n; - pos += n; } if (PyBytes_GET_SIZE(result) > bytes_read) { @@ -1074,6 +1094,12 @@ _io_FileIO_truncate_impl(fileio *self, PyTypeObject *cls, PyObject *posobj) return NULL; } + /* Sometimes a large file is truncated. While estimated_size is used as a + estimate, that it is much larger than the actual size can result in a + significant over allocation and sometimes a MemoryError / running out of + memory. */ + self->estimated_size = pos; + return posobj; } #endif /* HAVE_FTRUNCATE */ diff --git a/Modules/_opcode.c b/Modules/_opcode.c index 67643641bea861..dc93063aee7e54 100644 --- a/Modules/_opcode.c +++ b/Modules/_opcode.c @@ -10,6 +10,8 @@ #include "pycore_compile.h" #include "pycore_intrinsics.h" #include "pycore_optimizer.h" // _Py_GetExecutor() +#include "pycore_opcode_metadata.h" // IS_VALID_OPCODE, OPCODE_HAS_*, etc +#include "pycore_opcode_utils.h" /*[clinic input] module _opcode @@ -81,7 +83,7 @@ static int _opcode_is_valid_impl(PyObject *module, int opcode) /*[clinic end generated code: output=b0d918ea1d073f65 input=fe23e0aa194ddae0]*/ { - return _PyCompile_OpcodeIsValid(opcode); + return IS_VALID_OPCODE(opcode); } /*[clinic input] @@ -97,8 +99,7 @@ static int _opcode_has_arg_impl(PyObject *module, int opcode) /*[clinic end generated code: output=7a062d3b2dcc0815 input=93d878ba6361db5f]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasArg(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_ARG(opcode); } /*[clinic input] @@ -114,8 +115,7 @@ static int _opcode_has_const_impl(PyObject *module, int opcode) /*[clinic end generated code: output=c646d5027c634120 input=a6999e4cf13f9410]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasConst(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_CONST(opcode); } /*[clinic input] @@ -131,8 +131,7 @@ static int _opcode_has_name_impl(PyObject *module, int opcode) /*[clinic end generated code: output=b49a83555c2fa517 input=448aa5e4bcc947ba]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasName(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_NAME(opcode); } /*[clinic input] @@ -148,9 +147,7 @@ static int _opcode_has_jump_impl(PyObject *module, int opcode) /*[clinic end 
generated code: output=e9c583c669f1c46a input=35f711274357a0c3]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasJump(opcode); - + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_JUMP(opcode); } /*[clinic input] @@ -171,9 +168,7 @@ static int _opcode_has_free_impl(PyObject *module, int opcode) /*[clinic end generated code: output=d81ae4d79af0ee26 input=117dcd5c19c1139b]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasFree(opcode); - + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_FREE(opcode); } /*[clinic input] @@ -189,8 +184,7 @@ static int _opcode_has_local_impl(PyObject *module, int opcode) /*[clinic end generated code: output=da5a8616b7a5097b input=9a798ee24aaef49d]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasLocal(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_LOCAL(opcode); } /*[clinic input] @@ -206,8 +200,7 @@ static int _opcode_has_exc_impl(PyObject *module, int opcode) /*[clinic end generated code: output=41b68dff0ec82a52 input=db0e4bdb9bf13fa5]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasExc(opcode); + return IS_VALID_OPCODE(opcode) && IS_BLOCK_PUSH_OPCODE(opcode); } /*[clinic input] @@ -424,7 +417,7 @@ opcode_functions[] = { {NULL, NULL, 0, NULL} }; -int +static int _opcode_exec(PyObject *m) { if (PyModule_AddIntMacro(m, ENABLE_SPECIALIZATION) < 0) { return -1; diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index 4f72844535ebd6..850de6f9c3366b 100644 --- a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -125,6 +125,7 @@ test_atomic_fences(PyObject *self, PyObject *obj) { // Just make sure that the fences compile. We are not // testing any synchronizing ordering. _Py_atomic_fence_seq_cst(); + _Py_atomic_fence_acquire(); _Py_atomic_fence_release(); Py_RETURN_NONE; } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 2b11a01595b0bc..149e552af3a729 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -158,6 +158,10 @@ ASSERT_DICT_LOCKED(PyObject *op) if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ ASSERT_DICT_LOCKED(op); \ } +#define ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) \ + if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op); \ + } #define IS_DICT_SHARED(mp) _PyObject_GC_IS_SHARED(mp) #define SET_DICT_SHARED(mp) _PyObject_GC_SET_SHARED(mp) @@ -165,16 +169,15 @@ ASSERT_DICT_LOCKED(PyObject *op) #define STORE_INDEX(keys, size, idx, value) _Py_atomic_store_int##size##_relaxed(&((int##size##_t*)keys->dk_indices)[idx], (int##size##_t)value); #define ASSERT_OWNED_OR_SHARED(mp) \ assert(_Py_IsOwnedByCurrentThread((PyObject *)mp) || IS_DICT_SHARED(mp)); -#define LOAD_KEYS_NENTRIES(d) #define LOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - LOCK_KEYS(dk); \ + LOCK_KEYS(keys); \ } #define UNLOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - UNLOCK_KEYS(dk); \ + UNLOCK_KEYS(keys); \ } static inline Py_ssize_t @@ -208,7 +211,7 @@ set_values(PyDictObject *mp, PyDictValues *values) #define INCREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, 1) // Dec refs the keys object, giving the previous value #define DECREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, -1) -#define LOAD_KEYS_NENTIRES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) +#define LOAD_KEYS_NENTRIES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) #define INCREF_KEYS_FT(dk) dictkeys_incref(dk) #define DECREF_KEYS_FT(dk, shared) 
dictkeys_decref(_PyInterpreterState_GET(), dk, shared) @@ -227,6 +230,7 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #define ASSERT_DICT_LOCKED(op) #define ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op) +#define ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) #define LOCK_KEYS(keys) #define UNLOCK_KEYS(keys) #define ASSERT_KEYS_LOCKED(keys) @@ -234,7 +238,7 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #define STORE_SHARED_KEY(key, value) key = value #define INCREF_KEYS(dk) dk->dk_refcnt++ #define DECREF_KEYS(dk) dk->dk_refcnt-- -#define LOAD_KEYS_NENTIRES(keys) keys->dk_nentries +#define LOAD_KEYS_NENTRIES(keys) keys->dk_nentries #define INCREF_KEYS_FT(dk) #define DECREF_KEYS_FT(dk, shared) #define LOCK_KEYS_IF_SPLIT(keys, kind) @@ -689,10 +693,15 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) int splitted = _PyDict_HasSplitTable(mp); Py_ssize_t usable = USABLE_FRACTION(DK_SIZE(keys)); + // In the free-threaded build, shared keys may be concurrently modified, + // so use atomic loads. + Py_ssize_t dk_usable = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_usable); + Py_ssize_t dk_nentries = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_nentries); + CHECK(0 <= mp->ma_used && mp->ma_used <= usable); - CHECK(0 <= keys->dk_usable && keys->dk_usable <= usable); - CHECK(0 <= keys->dk_nentries && keys->dk_nentries <= usable); - CHECK(keys->dk_usable + keys->dk_nentries <= usable); + CHECK(0 <= dk_usable && dk_usable <= usable); + CHECK(0 <= dk_nentries && dk_nentries <= usable); + CHECK(dk_usable + dk_nentries <= usable); if (!splitted) { /* combined table */ @@ -709,6 +718,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) } if (check_content) { + LOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); for (Py_ssize_t i=0; i < DK_SIZE(keys); i++) { Py_ssize_t ix = dictkeys_get_index(keys, i); CHECK(DKIX_DUMMY <= ix && ix <= usable); @@ -764,6 +774,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) CHECK(mp->ma_values->values[index] != NULL); } } + UNLOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); } return 1; @@ -4032,7 +4043,7 @@ dict_equal_lock_held(PyDictObject *a, PyDictObject *b) /* can't be equal if # of entries differ */ return 0; /* Same # of entries -- check all of 'em. Exit early on any diff. 
*/ - for (i = 0; i < LOAD_KEYS_NENTIRES(a->ma_keys); i++) { + for (i = 0; i < LOAD_KEYS_NENTRIES(a->ma_keys); i++) { PyObject *key, *aval; Py_hash_t hash; if (DK_IS_UNICODE(a->ma_keys)) { @@ -6667,10 +6678,10 @@ make_dict_from_instance_attributes(PyInterpreterState *interp, return res; } -static PyDictObject * -materialize_managed_dict_lock_held(PyObject *obj) +PyDictObject * +_PyObject_MaterializeManagedDict_LockHeld(PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); PyDictValues *values = _PyObject_InlineValues(obj); PyInterpreterState *interp = _PyInterpreterState_GET(); @@ -6699,7 +6710,7 @@ _PyObject_MaterializeManagedDict(PyObject *obj) goto exit; } #endif - dict = materialize_managed_dict_lock_held(obj); + dict = _PyObject_MaterializeManagedDict_LockHeld(obj); #ifdef Py_GIL_DISABLED exit: @@ -7132,7 +7143,7 @@ PyObject_ClearManagedDict(PyObject *obj) int _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); assert(_PyObject_ManagedDictPointer(obj)->dict == mp); assert(_PyObject_InlineValuesConsistencyCheck(obj)); diff --git a/Objects/listobject.c b/Objects/listobject.c index 9eae9626f7c1f1..f29f58dc25be04 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -3244,7 +3244,7 @@ list_index_impl(PyListObject *self, PyObject *value, Py_ssize_t start, else if (cmp < 0) return NULL; } - PyErr_Format(PyExc_ValueError, "%R is not in list", value); + PyErr_SetString(PyExc_ValueError, "list.index(x): x not in list"); return NULL; } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index d033e2bad1891a..a6a71802ef8e01 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -386,8 +386,16 @@ _PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr, ) { #ifdef MS_WINDOWS + /* Unlike free(), VirtualFree() does not special-case NULL to noop. */ + if (ptr == NULL) { + return; + } VirtualFree(ptr, 0, MEM_RELEASE); #elif defined(ARENAS_USE_MMAP) + /* Unlike free(), munmap() does not special-case NULL to noop. 
*/ + if (ptr == NULL) { + return; + } munmap(ptr, size); #else free(ptr); diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index d5db48c143324f..9727b4f47b53a1 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -655,7 +655,7 @@ range_index(rangeobject *r, PyObject *ob) } /* object is not in the range */ - PyErr_Format(PyExc_ValueError, "%R is not in range", ob); + PyErr_SetString(PyExc_ValueError, "range.index(x): x not in range"); return NULL; } diff --git a/Objects/setobject.c b/Objects/setobject.c index eb0c404bf6b8e0..5d7ad395d08c90 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -184,14 +184,14 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) found_unused_or_dummy: if (freeslot == NULL) goto found_unused; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); freeslot->key = key; freeslot->hash = hash; return 0; found_unused: so->fill++; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); entry->key = key; entry->hash = hash; if ((size_t)so->fill*5 < mask*3) @@ -357,7 +357,7 @@ set_discard_entry(PySetObject *so, PyObject *key, Py_hash_t hash) old_key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); Py_DECREF(old_key); return DISCARD_FOUND; } @@ -397,7 +397,7 @@ set_empty_to_minsize(PySetObject *so) { memset(so->smalltable, 0, sizeof(so->smalltable)); so->fill = 0; - so->used = 0; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, 0); so->mask = PySet_MINSIZE - 1; so->table = so->smalltable; so->hash = -1; @@ -615,7 +615,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) } } so->fill = other->fill; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); return 0; } @@ -624,7 +624,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) setentry *newtable = so->table; size_t newmask = (size_t)so->mask; so->fill = other->used; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); for (i = other->mask + 1; i > 0 ; i--, other_entry++) { key = other_entry->key; if (key != NULL && key != dummy) { @@ -678,7 +678,7 @@ set_pop_impl(PySetObject *so) key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); so->finger = entry - so->table + 1; /* next place to start */ return key; } @@ -1173,7 +1173,9 @@ set_swap_bodies(PySetObject *a, PySetObject *b) Py_hash_t h; t = a->fill; a->fill = b->fill; b->fill = t; - t = a->used; a->used = b->used; b->used = t; + t = a->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(a->used, b->used); + FT_ATOMIC_STORE_SSIZE_RELAXED(b->used, t); t = a->mask; a->mask = b->mask; b->mask = t; u = a->table; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 994258f20b495d..3704d095a977ea 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -1153,7 +1153,7 @@ maybe_freelist_pop(Py_ssize_t size) return NULL; } assert(size > 0); - if (size < PyTuple_MAXSAVESIZE) { + if (size <= PyTuple_MAXSAVESIZE) { Py_ssize_t index = size - 1; PyTupleObject *op = TUPLE_FREELIST.items[index]; if (op != NULL) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 447e561c0d4440..587632cecfba9d 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -5387,7 +5387,7 @@ _PyType_LookupRef(PyTypeObject *type, PyObject *name) #ifdef Py_GIL_DISABLED // synchronize-with other writing threads by doing an acquire load on the sequence while (1) { - int sequence = 
_PySeqLock_BeginRead(&entry->sequence); + uint32_t sequence = _PySeqLock_BeginRead(&entry->sequence); uint32_t entry_version = _Py_atomic_load_uint32_relaxed(&entry->version); uint32_t type_version = _Py_atomic_load_uint32_acquire(&type->tp_version_tag); if (entry_version == type_version && @@ -6540,28 +6540,11 @@ compatible_for_assignment(PyTypeObject* oldto, PyTypeObject* newto, const char* return 0; } -static int -object_set_class(PyObject *self, PyObject *value, void *closure) -{ - - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "can't delete __class__ attribute"); - return -1; - } - if (!PyType_Check(value)) { - PyErr_Format(PyExc_TypeError, - "__class__ must be set to a class, not '%s' object", - Py_TYPE(value)->tp_name); - return -1; - } - PyTypeObject *newto = (PyTypeObject *)value; - if (PySys_Audit("object.__setattr__", "OsO", - self, "__class__", value) < 0) { - return -1; - } +static int +object_set_class_world_stopped(PyObject *self, PyTypeObject *newto) +{ PyTypeObject *oldto = Py_TYPE(self); /* In versions of CPython prior to 3.5, the code in @@ -6627,39 +6610,66 @@ object_set_class(PyObject *self, PyObject *value, void *closure) /* Changing the class will change the implicit dict keys, * so we must materialize the dictionary first. */ if (oldto->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictObject *dict = _PyObject_MaterializeManagedDict(self); + PyDictObject *dict = _PyObject_GetManagedDict(self); if (dict == NULL) { - return -1; + dict = _PyObject_MaterializeManagedDict_LockHeld(self); + if (dict == NULL) { + return -1; + } } - bool error = false; - - Py_BEGIN_CRITICAL_SECTION2(self, dict); - - // If we raced after materialization and replaced the dict - // then the materialized dict should no longer have the - // inline values in which case detach is a nop. - assert(_PyObject_GetManagedDict(self) == dict || - dict->ma_values != _PyObject_InlineValues(self)); + assert(_PyObject_GetManagedDict(self) == dict); if (_PyDict_DetachFromObject(dict, self) < 0) { - error = true; - } - - Py_END_CRITICAL_SECTION2(); - if (error) { return -1; } + } if (newto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_INCREF(newto); } - Py_BEGIN_CRITICAL_SECTION(self); - // The real Py_TYPE(self) (`oldto`) may have changed from - // underneath us in another thread, so we re-fetch it here. 
- oldto = Py_TYPE(self); + Py_SET_TYPE(self, newto); - Py_END_CRITICAL_SECTION(); + + return 0; + } + else { + return -1; + } +} + +static int +object_set_class(PyObject *self, PyObject *value, void *closure) +{ + + if (value == NULL) { + PyErr_SetString(PyExc_TypeError, + "can't delete __class__ attribute"); + return -1; + } + if (!PyType_Check(value)) { + PyErr_Format(PyExc_TypeError, + "__class__ must be set to a class, not '%s' object", + Py_TYPE(value)->tp_name); + return -1; + } + PyTypeObject *newto = (PyTypeObject *)value; + + if (PySys_Audit("object.__setattr__", "OsO", + self, "__class__", value) < 0) { + return -1; + } + +#ifdef Py_GIL_DISABLED + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyEval_StopTheWorld(interp); +#endif + PyTypeObject *oldto = Py_TYPE(self); + int res = object_set_class_world_stopped(self, newto); +#ifdef Py_GIL_DISABLED + _PyEval_StartTheWorld(interp); +#endif + if (res == 0) { if (oldto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_DECREF(oldto); } @@ -6667,9 +6677,7 @@ object_set_class(PyObject *self, PyObject *value, void *closure) RARE_EVENT_INC(set_class); return 0; } - else { - return -1; - } + return res; } static PyGetSetDef object_getsets[] = { diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 3378ed54203f18..f36fcb8caece33 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -163,6 +163,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 742d88d9e1fa7a..a1b43addf9e36a 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -432,6 +432,9 @@ Include\cpython + + Include\cpython + Include\cpython diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index 4aa14ed1fad9eb..416241d9d0df10 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -90,23 +90,23 @@ Inputs="@(_CasesSources)" Outputs="@(_CasesOutputs)" DependsOnTargets="FindPythonForBuild"> - - - - - - - - - diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 3711cf1280f7bf..e6867f138a5ccb 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1143,6 +1143,279 @@ def visitModule(self, mod): return result; } +/* + * Perform the following validations: + * + * - All keyword arguments are known 'fields' or 'attributes'. + * - No field or attribute would be left unfilled after copy.replace(). + * + * On success, this returns 1. Otherwise, set a TypeError + * exception and returns -1 (no exception is set if some + * other internal errors occur). + * + * Parameters + * + * self The AST node instance. + * dict The AST node instance dictionary (self.__dict__). + * fields The list of fields (self._fields). + * attributes The list of attributes (self._attributes). + * kwargs Keyword arguments passed to ast_type_replace(). + * + * The 'dict', 'fields', 'attributes' and 'kwargs' arguments can be NULL. + * + * Note: this function can be removed in 3.15 since the verification + * will be done inside the constructor. + */ +static inline int +ast_type_replace_check(PyObject *self, + PyObject *dict, + PyObject *fields, + PyObject *attributes, + PyObject *kwargs) +{ + // While it is possible to make some fast paths that would avoid + // allocating objects on the stack, this would cost us readability. + // For instance, if 'fields' and 'attributes' are both empty, and + // 'kwargs' is not empty, we could raise a TypeError immediately. 
+ PyObject *expecting = PySet_New(fields); + if (expecting == NULL) { + return -1; + } + if (attributes) { + if (_PySet_Update(expecting, attributes) < 0) { + Py_DECREF(expecting); + return -1; + } + } + // Any keyword argument that is neither a field nor attribute is rejected. + // We first need to check whether a keyword argument is accepted or not. + // If all keyword arguments are accepted, we compute the required fields + // and attributes. A field or attribute is not needed if: + // + // 1) it is given in 'kwargs', or + // 2) it already exists on 'self'. + if (kwargs) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(kwargs, &pos, &key, &value)) { + int rc = PySet_Discard(expecting, key); + if (rc < 0) { + Py_DECREF(expecting); + return -1; + } + if (rc == 0) { + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ got an unexpected keyword " + "argument '%U'.", Py_TYPE(self)->tp_name, key); + Py_DECREF(expecting); + return -1; + } + } + } + // check that the remaining fields or attributes would be filled + if (dict) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(dict, &pos, &key, &value)) { + // Mark fields or attributes that are found on the instance + // as non-mandatory. If they are not given in 'kwargs', they + // will be shallow-coied; otherwise, they would be replaced + // (not in this function). + if (PySet_Discard(expecting, key) < 0) { + Py_DECREF(expecting); + return -1; + } + } + if (attributes) { + // Some attributes may or may not be present at runtime. + // In particular, now that we checked whether 'kwargs' + // is correct or not, we allow any attribute to be missing. + // + // Note that fields must still be entirely determined when + // calling the constructor later. + PyObject *unused = PyObject_CallMethodOneArg(expecting, + &_Py_ID(difference_update), + attributes); + if (unused == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_DECREF(unused); + } + } + // Now 'expecting' contains the fields or attributes + // that would not be filled inside ast_type_replace(). + Py_ssize_t m = PySet_GET_SIZE(expecting); + if (m > 0) { + PyObject *names = PyList_New(m); + if (names == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_ssize_t i = 0, pos = 0; + PyObject *item; + Py_hash_t hash; + while (_PySet_NextEntry(expecting, &pos, &item, &hash)) { + PyObject *name = PyObject_Repr(item); + if (name == NULL) { + Py_DECREF(expecting); + Py_DECREF(names); + return -1; + } + // steal the reference 'name' + PyList_SET_ITEM(names, i++, name); + } + Py_DECREF(expecting); + if (PyList_Sort(names) < 0) { + Py_DECREF(names); + return -1; + } + PyObject *sep = PyUnicode_FromString(", "); + if (sep == NULL) { + Py_DECREF(names); + return -1; + } + PyObject *str_names = PyUnicode_Join(sep, names); + Py_DECREF(sep); + Py_DECREF(names); + if (str_names == NULL) { + return -1; + } + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ missing %ld keyword argument%s: %U.", + Py_TYPE(self)->tp_name, m, m == 1 ? "" : "s", str_names); + Py_DECREF(str_names); + return -1; + } + else { + Py_DECREF(expecting); + return 1; + } +} + +/* + * Python equivalent: + * + * for key in keys: + * if hasattr(self, key): + * payload[key] = getattr(self, key) + * + * The 'keys' argument is a sequence corresponding to + * the '_fields' or the '_attributes' of an AST node. + * + * This returns -1 if an error occurs and 0 otherwise. + * + * Parameters + * + * payload A dictionary to fill. + * keys A sequence of keys or NULL for an empty sequence. 
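/*
 * Editor's note (illustrative, not part of the generated code): once
 * ast_type_replace() below is registered as __replace__, copy.replace()
 * works on AST nodes.  Snippet assumes an already initialized interpreter.
 */
PyRun_SimpleString(
    "import ast, copy\n"
    "node = ast.Constant(value=1, kind=None)\n"
    "clone = copy.replace(node, value=2)   # shallow copy, one field swapped\n"
    "assert clone.value == 2 and node.value == 1\n"
    "assert type(clone) is ast.Constant\n");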
+ * dict The AST node instance dictionary (must not be NULL). + */ +static inline int +ast_type_replace_update_payload(PyObject *payload, + PyObject *keys, + PyObject *dict) +{ + assert(dict != NULL); + if (keys == NULL) { + return 0; + } + Py_ssize_t n = PySequence_Size(keys); + if (n == -1) { + return -1; + } + for (Py_ssize_t i = 0; i < n; i++) { + PyObject *key = PySequence_GetItem(keys, i); + if (key == NULL) { + return -1; + } + PyObject *value; + if (PyDict_GetItemRef(dict, key, &value) < 0) { + Py_DECREF(key); + return -1; + } + if (value == NULL) { + Py_DECREF(key); + // If a field or attribute is not present at runtime, it should + // be explicitly given in 'kwargs'. If not, the constructor will + // issue a warning (which becomes an error in 3.15). + continue; + } + int rc = PyDict_SetItem(payload, key, value); + Py_DECREF(key); + Py_DECREF(value); + if (rc < 0) { + return -1; + } + } + return 0; +} + +/* copy.replace() support (shallow copy) */ +static PyObject * +ast_type_replace(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (!_PyArg_NoPositional("__replace__", args)) { + return NULL; + } + + struct ast_state *state = get_ast_state(); + if (state == NULL) { + return NULL; + } + + PyObject *result = NULL; + // known AST class fields and attributes + PyObject *fields = NULL, *attributes = NULL; + // current instance dictionary + PyObject *dict = NULL; + // constructor positional and keyword arguments + PyObject *empty_tuple = NULL, *payload = NULL; + + PyObject *type = (PyObject *)Py_TYPE(self); + if (PyObject_GetOptionalAttr(type, state->_fields, &fields) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(type, state->_attributes, &attributes) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(self, state->__dict__, &dict) < 0) { + goto cleanup; + } + if (ast_type_replace_check(self, dict, fields, attributes, kwargs) < 0) { + goto cleanup; + } + empty_tuple = PyTuple_New(0); + if (empty_tuple == NULL) { + goto cleanup; + } + payload = PyDict_New(); + if (payload == NULL) { + goto cleanup; + } + if (dict) { // in case __dict__ is missing (for some obscure reason) + // copy the instance's fields (possibly NULL) + if (ast_type_replace_update_payload(payload, fields, dict) < 0) { + goto cleanup; + } + // copy the instance's attributes (possibly NULL) + if (ast_type_replace_update_payload(payload, attributes, dict) < 0) { + goto cleanup; + } + } + if (kwargs && PyDict_Update(payload, kwargs) < 0) { + goto cleanup; + } + result = PyObject_Call(type, empty_tuple, payload); +cleanup: + Py_XDECREF(payload); + Py_XDECREF(empty_tuple); + Py_XDECREF(dict); + Py_XDECREF(attributes); + Py_XDECREF(fields); + return result; +} + static PyMemberDef ast_type_members[] = { {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ @@ -1150,6 +1423,10 @@ def visitModule(self, mod): static PyMethodDef ast_type_methods[] = { {"__reduce__", ast_type_reduce, METH_NOARGS, NULL}, + {"__replace__", _PyCFunction_CAST(ast_type_replace), METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("__replace__($self, /, **fields)\\n--\\n\\n" + "Return a copy of the AST node with new values " + "for the specified fields.")}, {NULL} }; @@ -1784,7 +2061,9 @@ def generate_module_def(mod, metadata, f, internal_h): #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_lock.h" // _PyOnceFlag #include "pycore_interp.h" // _PyInterpreterState.ast + #include "pycore_modsupport.h" // _PyArg_NoPositional() #include "pycore_pystate.h" // 
_PyInterpreterState_GET() + #include "pycore_setobject.h" // _PySet_NextEntry(), _PySet_Update() #include "pycore_unionobject.h" // _Py_union_type_or #include "structmember.h" #include diff --git a/Python/Python-ast.c b/Python/Python-ast.c index e38a145271589a..4d0db457a8b172 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -6,7 +6,9 @@ #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_lock.h" // _PyOnceFlag #include "pycore_interp.h" // _PyInterpreterState.ast +#include "pycore_modsupport.h" // _PyArg_NoPositional() #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_setobject.h" // _PySet_NextEntry(), _PySet_Update() #include "pycore_unionobject.h" // _Py_union_type_or #include "structmember.h" #include @@ -5342,6 +5344,279 @@ ast_type_reduce(PyObject *self, PyObject *unused) return result; } +/* + * Perform the following validations: + * + * - All keyword arguments are known 'fields' or 'attributes'. + * - No field or attribute would be left unfilled after copy.replace(). + * + * On success, this returns 1. Otherwise, set a TypeError + * exception and returns -1 (no exception is set if some + * other internal errors occur). + * + * Parameters + * + * self The AST node instance. + * dict The AST node instance dictionary (self.__dict__). + * fields The list of fields (self._fields). + * attributes The list of attributes (self._attributes). + * kwargs Keyword arguments passed to ast_type_replace(). + * + * The 'dict', 'fields', 'attributes' and 'kwargs' arguments can be NULL. + * + * Note: this function can be removed in 3.15 since the verification + * will be done inside the constructor. + */ +static inline int +ast_type_replace_check(PyObject *self, + PyObject *dict, + PyObject *fields, + PyObject *attributes, + PyObject *kwargs) +{ + // While it is possible to make some fast paths that would avoid + // allocating objects on the stack, this would cost us readability. + // For instance, if 'fields' and 'attributes' are both empty, and + // 'kwargs' is not empty, we could raise a TypeError immediately. + PyObject *expecting = PySet_New(fields); + if (expecting == NULL) { + return -1; + } + if (attributes) { + if (_PySet_Update(expecting, attributes) < 0) { + Py_DECREF(expecting); + return -1; + } + } + // Any keyword argument that is neither a field nor attribute is rejected. + // We first need to check whether a keyword argument is accepted or not. + // If all keyword arguments are accepted, we compute the required fields + // and attributes. A field or attribute is not needed if: + // + // 1) it is given in 'kwargs', or + // 2) it already exists on 'self'. + if (kwargs) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(kwargs, &pos, &key, &value)) { + int rc = PySet_Discard(expecting, key); + if (rc < 0) { + Py_DECREF(expecting); + return -1; + } + if (rc == 0) { + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ got an unexpected keyword " + "argument '%U'.", Py_TYPE(self)->tp_name, key); + Py_DECREF(expecting); + return -1; + } + } + } + // check that the remaining fields or attributes would be filled + if (dict) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(dict, &pos, &key, &value)) { + // Mark fields or attributes that are found on the instance + // as non-mandatory. If they are not given in 'kwargs', they + // will be shallow-coied; otherwise, they would be replaced + // (not in this function). 
+ if (PySet_Discard(expecting, key) < 0) { + Py_DECREF(expecting); + return -1; + } + } + if (attributes) { + // Some attributes may or may not be present at runtime. + // In particular, now that we checked whether 'kwargs' + // is correct or not, we allow any attribute to be missing. + // + // Note that fields must still be entirely determined when + // calling the constructor later. + PyObject *unused = PyObject_CallMethodOneArg(expecting, + &_Py_ID(difference_update), + attributes); + if (unused == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_DECREF(unused); + } + } + // Now 'expecting' contains the fields or attributes + // that would not be filled inside ast_type_replace(). + Py_ssize_t m = PySet_GET_SIZE(expecting); + if (m > 0) { + PyObject *names = PyList_New(m); + if (names == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_ssize_t i = 0, pos = 0; + PyObject *item; + Py_hash_t hash; + while (_PySet_NextEntry(expecting, &pos, &item, &hash)) { + PyObject *name = PyObject_Repr(item); + if (name == NULL) { + Py_DECREF(expecting); + Py_DECREF(names); + return -1; + } + // steal the reference 'name' + PyList_SET_ITEM(names, i++, name); + } + Py_DECREF(expecting); + if (PyList_Sort(names) < 0) { + Py_DECREF(names); + return -1; + } + PyObject *sep = PyUnicode_FromString(", "); + if (sep == NULL) { + Py_DECREF(names); + return -1; + } + PyObject *str_names = PyUnicode_Join(sep, names); + Py_DECREF(sep); + Py_DECREF(names); + if (str_names == NULL) { + return -1; + } + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ missing %ld keyword argument%s: %U.", + Py_TYPE(self)->tp_name, m, m == 1 ? "" : "s", str_names); + Py_DECREF(str_names); + return -1; + } + else { + Py_DECREF(expecting); + return 1; + } +} + +/* + * Python equivalent: + * + * for key in keys: + * if hasattr(self, key): + * payload[key] = getattr(self, key) + * + * The 'keys' argument is a sequence corresponding to + * the '_fields' or the '_attributes' of an AST node. + * + * This returns -1 if an error occurs and 0 otherwise. + * + * Parameters + * + * payload A dictionary to fill. + * keys A sequence of keys or NULL for an empty sequence. + * dict The AST node instance dictionary (must not be NULL). + */ +static inline int +ast_type_replace_update_payload(PyObject *payload, + PyObject *keys, + PyObject *dict) +{ + assert(dict != NULL); + if (keys == NULL) { + return 0; + } + Py_ssize_t n = PySequence_Size(keys); + if (n == -1) { + return -1; + } + for (Py_ssize_t i = 0; i < n; i++) { + PyObject *key = PySequence_GetItem(keys, i); + if (key == NULL) { + return -1; + } + PyObject *value; + if (PyDict_GetItemRef(dict, key, &value) < 0) { + Py_DECREF(key); + return -1; + } + if (value == NULL) { + Py_DECREF(key); + // If a field or attribute is not present at runtime, it should + // be explicitly given in 'kwargs'. If not, the constructor will + // issue a warning (which becomes an error in 3.15). 
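/*
 * Editor's note (illustrative, not part of the generated code): the checks in
 * ast_type_replace_check() above are what surface as a TypeError at the
 * Python level.  Snippet assumes an already initialized interpreter.
 */
PyRun_SimpleString(
    "import ast\n"
    "node = ast.Constant(value=1, kind=None)\n"
    "try:\n"
    "    node.__replace__(no_such_field=2)\n"
    "except TypeError as exc:\n"
    "    assert 'unexpected keyword' in str(exc)\n"
    "else:\n"
    "    raise AssertionError('expected TypeError')\n");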
+ continue; + } + int rc = PyDict_SetItem(payload, key, value); + Py_DECREF(key); + Py_DECREF(value); + if (rc < 0) { + return -1; + } + } + return 0; +} + +/* copy.replace() support (shallow copy) */ +static PyObject * +ast_type_replace(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (!_PyArg_NoPositional("__replace__", args)) { + return NULL; + } + + struct ast_state *state = get_ast_state(); + if (state == NULL) { + return NULL; + } + + PyObject *result = NULL; + // known AST class fields and attributes + PyObject *fields = NULL, *attributes = NULL; + // current instance dictionary + PyObject *dict = NULL; + // constructor positional and keyword arguments + PyObject *empty_tuple = NULL, *payload = NULL; + + PyObject *type = (PyObject *)Py_TYPE(self); + if (PyObject_GetOptionalAttr(type, state->_fields, &fields) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(type, state->_attributes, &attributes) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(self, state->__dict__, &dict) < 0) { + goto cleanup; + } + if (ast_type_replace_check(self, dict, fields, attributes, kwargs) < 0) { + goto cleanup; + } + empty_tuple = PyTuple_New(0); + if (empty_tuple == NULL) { + goto cleanup; + } + payload = PyDict_New(); + if (payload == NULL) { + goto cleanup; + } + if (dict) { // in case __dict__ is missing (for some obscure reason) + // copy the instance's fields (possibly NULL) + if (ast_type_replace_update_payload(payload, fields, dict) < 0) { + goto cleanup; + } + // copy the instance's attributes (possibly NULL) + if (ast_type_replace_update_payload(payload, attributes, dict) < 0) { + goto cleanup; + } + } + if (kwargs && PyDict_Update(payload, kwargs) < 0) { + goto cleanup; + } + result = PyObject_Call(type, empty_tuple, payload); +cleanup: + Py_XDECREF(payload); + Py_XDECREF(empty_tuple); + Py_XDECREF(dict); + Py_XDECREF(attributes); + Py_XDECREF(fields); + return result; +} + static PyMemberDef ast_type_members[] = { {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ @@ -5349,6 +5624,10 @@ static PyMemberDef ast_type_members[] = { static PyMethodDef ast_type_methods[] = { {"__reduce__", ast_type_reduce, METH_NOARGS, NULL}, + {"__replace__", _PyCFunction_CAST(ast_type_replace), METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("__replace__($self, /, **fields)\n--\n\n" + "Return a copy of the AST node with new values " + "for the specified fields.")}, {NULL} }; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 6e50623cafa4ed..a5b45e358d9efb 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2516,6 +2516,49 @@ Without arguments, equivalent to locals().\n\ With an argument, equivalent to object.__dict__."); +/* Improved Kahan–Babuška algorithm by Arnold Neumaier + Neumaier, A. (1974), Rundungsfehleranalyse einiger Verfahren + zur Summation endlicher Summen. Z. angew. Math. Mech., + 54: 39-51. 
https://doi.org/10.1002/zamm.19740540106 + https://en.wikipedia.org/wiki/Kahan_summation_algorithm#Further_enhancements + */ + +typedef struct { + double hi; /* high-order bits for a running sum */ + double lo; /* a running compensation for lost low-order bits */ +} CompensatedSum; + +static inline CompensatedSum +cs_from_double(double x) +{ + return (CompensatedSum) {x}; +} + +static inline CompensatedSum +cs_add(CompensatedSum total, double x) +{ + double t = total.hi + x; + if (fabs(total.hi) >= fabs(x)) { + total.lo += (total.hi - t) + x; + } + else { + total.lo += (x - t) + total.hi; + } + return (CompensatedSum) {t, total.lo}; +} + +static inline double +cs_to_double(CompensatedSum total) +{ + /* Avoid losing the sign on a negative result, + and don't let adding the compensation convert + an infinite or overflowed sum to a NaN. */ + if (total.lo && isfinite(total.lo)) { + return total.hi + total.lo; + } + return total.hi; +} + /*[clinic input] sum as builtin_sum @@ -2628,8 +2671,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) } if (PyFloat_CheckExact(result)) { - double f_result = PyFloat_AS_DOUBLE(result); - double c = 0.0; + CompensatedSum re_sum = cs_from_double(PyFloat_AS_DOUBLE(result)); Py_SETREF(result, NULL); while(result == NULL) { item = PyIter_Next(iter); @@ -2637,28 +2679,10 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) Py_DECREF(iter); if (PyErr_Occurred()) return NULL; - /* Avoid losing the sign on a negative result, - and don't let adding the compensation convert - an infinite or overflowed sum to a NaN. */ - if (c && isfinite(c)) { - f_result += c; - } - return PyFloat_FromDouble(f_result); + return PyFloat_FromDouble(cs_to_double(re_sum)); } if (PyFloat_CheckExact(item)) { - // Improved Kahan–Babuška algorithm by Arnold Neumaier - // Neumaier, A. (1974), Rundungsfehleranalyse einiger Verfahren - // zur Summation endlicher Summen. Z. angew. Math. Mech., - // 54: 39-51. 
https://doi.org/10.1002/zamm.19740540106 - // https://en.wikipedia.org/wiki/Kahan_summation_algorithm#Further_enhancements - double x = PyFloat_AS_DOUBLE(item); - double t = f_result + x; - if (fabs(f_result) >= fabs(x)) { - c += (f_result - t) + x; - } else { - c += (x - t) + f_result; - } - f_result = t; + re_sum = cs_add(re_sum, PyFloat_AS_DOUBLE(item)); _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); continue; } @@ -2667,15 +2691,70 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) int overflow; value = PyLong_AsLongAndOverflow(item, &overflow); if (!overflow) { - f_result += (double)value; + re_sum.hi += (double)value; + Py_DECREF(item); + continue; + } + } + result = PyFloat_FromDouble(cs_to_double(re_sum)); + if (result == NULL) { + Py_DECREF(item); + Py_DECREF(iter); + return NULL; + } + temp = PyNumber_Add(result, item); + Py_DECREF(result); + Py_DECREF(item); + result = temp; + if (result == NULL) { + Py_DECREF(iter); + return NULL; + } + } + } + + if (PyComplex_CheckExact(result)) { + Py_complex z = PyComplex_AsCComplex(result); + CompensatedSum re_sum = cs_from_double(z.real); + CompensatedSum im_sum = cs_from_double(z.imag); + Py_SETREF(result, NULL); + while (result == NULL) { + item = PyIter_Next(iter); + if (item == NULL) { + Py_DECREF(iter); + if (PyErr_Occurred()) { + return NULL; + } + return PyComplex_FromDoubles(cs_to_double(re_sum), + cs_to_double(im_sum)); + } + if (PyComplex_CheckExact(item)) { + z = PyComplex_AsCComplex(item); + re_sum = cs_add(re_sum, z.real); + im_sum = cs_add(im_sum, z.imag); + Py_DECREF(item); + continue; + } + if (PyLong_Check(item)) { + long value; + int overflow; + value = PyLong_AsLongAndOverflow(item, &overflow); + if (!overflow) { + re_sum.hi += (double)value; + im_sum.hi += 0.0; Py_DECREF(item); continue; } } - if (c && isfinite(c)) { - f_result += c; + if (PyFloat_Check(item)) { + double value = PyFloat_AS_DOUBLE(item); + re_sum.hi += value; + im_sum.hi += 0.0; + _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); + continue; } - result = PyFloat_FromDouble(f_result); + result = PyComplex_FromDoubles(cs_to_double(re_sum), + cs_to_double(im_sum)); if (result == NULL) { Py_DECREF(item); Py_DECREF(iter); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 76587a4f0dc695..84241c64ffae88 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -2967,7 +2967,10 @@ dummy_func( assert(Py_TYPE(iter_o) == &PyListIter_Type); PyListObject *seq = it->it_seq; EXIT_IF(seq == NULL); - EXIT_IF((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)); + if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { + it->it_index = -1; + EXIT_IF(1); + } } op(_ITER_NEXT_LIST, (iter -- iter, next)) { diff --git a/Python/ceval.c b/Python/ceval.c index a240ed4321f7ee..d8bc830f8e80c1 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -730,15 +730,6 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch) * so consume 3 units of C stack */ #define PY_EVAL_C_STACK_UNITS 2 -#if defined(_MSC_VER) && defined(_Py_USING_PGO) -/* gh-111786: _PyEval_EvalFrameDefault is too large to optimize for speed with - PGO on MSVC. Disable that optimization temporarily. If this is fixed - upstream, we should gate this on the version of MSVC. 
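/*
 * Editor's note (illustrative, not part of the patch): a standalone program
 * mirroring the CompensatedSum helpers introduced above (Neumaier's variant
 * of Kahan summation).  It shows the cancellation error that the running
 * compensation absorbs; cs_to_double()'s isfinite() guard is omitted here.
 */
#include <math.h>
#include <stdio.h>

typedef struct { double hi, lo; } CompensatedSum;

static CompensatedSum cs_add(CompensatedSum total, double x)
{
    double t = total.hi + x;
    if (fabs(total.hi) >= fabs(x)) {
        total.lo += (total.hi - t) + x;   /* bits lost from total.hi */
    }
    else {
        total.lo += (x - t) + total.hi;   /* bits lost from x */
    }
    total.hi = t;
    return total;
}

int main(void)
{
    const double values[] = {1e16, 1.0, -1e16};
    CompensatedSum sum = {0.0, 0.0};
    double naive = 0.0;
    for (int i = 0; i < 3; i++) {
        sum = cs_add(sum, values[i]);
        naive += values[i];
    }
    printf("naive=%g compensated=%g\n", naive, sum.hi + sum.lo);
    /* prints: naive=0 compensated=1 */
    return 0;
}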
- */ -# pragma optimize("t", off) -/* This setting is reversed below following _PyEval_EvalFrameDefault */ -#endif - PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -1158,7 +1149,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int # pragma GCC diagnostic pop #elif defined(_MSC_VER) /* MS_WINDOWS */ # pragma warning(pop) -# pragma optimize("", on) #endif static void diff --git a/Python/compile.c b/Python/compile.c index 30708e1dda9d43..4190b141324b38 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -71,9 +71,26 @@ ((C)->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ && ((C)->u->u_ste->ste_type == ModuleBlock)) +struct compiler; + +typedef _PyInstruction instruction; +typedef _PyInstructionSequence instr_sequence; + +static instr_sequence *compiler_instr_sequence(struct compiler *c); +static int compiler_future_features(struct compiler *c); +static struct symtable *compiler_symtable(struct compiler *c); +static PySTEntryObject *compiler_symtable_entry(struct compiler *c); + +#define INSTR_SEQUENCE(C) compiler_instr_sequence(C) +#define FUTURE_FEATURES(C) compiler_future_features(C) +#define SYMTABLE(C) compiler_symtable(C) +#define SYMTABLE_ENTRY(C) compiler_symtable_entry(C) + typedef _Py_SourceLocation location; typedef struct _PyCfgBuilder cfg_builder; +static PyObject *compiler_maybe_mangle(struct compiler *c, PyObject *name); + #define LOCATION(LNO, END_LNO, COL, END_COL) \ ((const _Py_SourceLocation){(LNO), (END_LNO), (COL), (END_COL)}) @@ -129,12 +146,6 @@ enum { }; -typedef _PyInstruction instruction; -typedef _PyInstructionSequence instr_sequence; - -#define INITIAL_INSTR_SEQUENCE_SIZE 100 -#define INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE 10 - static const int compare_masks[] = { [Py_LT] = COMPARISON_LESS_THAN, [Py_LE] = COMPARISON_LESS_THAN | COMPARISON_EQUALS, @@ -254,8 +265,6 @@ struct compiler { */ }; -#define INSTR_SEQUENCE(C) ((C)->u->u_instr_sequence) - typedef struct { // A list of strings corresponding to name captures. 
It is used to track: @@ -313,7 +322,6 @@ static int compiler_call_helper(struct compiler *c, location loc, asdl_keyword_seq *keywords); static int compiler_try_except(struct compiler *, stmt_ty); static int compiler_try_star_except(struct compiler *, stmt_ty); -static int compiler_set_qualname(struct compiler *); static int compiler_sync_comprehension_generator( struct compiler *c, location loc, @@ -554,8 +562,8 @@ compiler_unit_free(struct compiler_unit *u) PyMem_Free(u); } -static struct compiler_unit * -get_class_compiler_unit(struct compiler *c) +static int +compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr) { Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack); for (Py_ssize_t i = stack_size - 1; i >= 0; i--) { @@ -564,10 +572,12 @@ get_class_compiler_unit(struct compiler *c) capsule, CAPSULE_NAME); assert(u); if (u->u_scope_type == COMPILER_SCOPE_CLASS) { - return u; + assert(u->u_static_attributes); + RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, attr)); + break; } } - return NULL; + return SUCCESS; } static int @@ -655,54 +665,6 @@ compiler_set_qualname(struct compiler *c) return SUCCESS; } -int -_PyCompile_OpcodeIsValid(int opcode) -{ - return IS_VALID_OPCODE(opcode); -} - -int -_PyCompile_OpcodeHasArg(int opcode) -{ - return OPCODE_HAS_ARG(opcode); -} - -int -_PyCompile_OpcodeHasConst(int opcode) -{ - return OPCODE_HAS_CONST(opcode); -} - -int -_PyCompile_OpcodeHasName(int opcode) -{ - return OPCODE_HAS_NAME(opcode); -} - -int -_PyCompile_OpcodeHasJump(int opcode) -{ - return OPCODE_HAS_JUMP(opcode); -} - -int -_PyCompile_OpcodeHasFree(int opcode) -{ - return OPCODE_HAS_FREE(opcode); -} - -int -_PyCompile_OpcodeHasLocal(int opcode) -{ - return OPCODE_HAS_LOCAL(opcode); -} - -int -_PyCompile_OpcodeHasExc(int opcode) -{ - return IS_BLOCK_PUSH_OPCODE(opcode); -} - static int codegen_addop_noarg(instr_sequence *seq, int opcode, location loc) { @@ -737,9 +699,11 @@ dict_add_o(PyObject *dict, PyObject *o) return arg; } -// Merge const *o* recursively and return constant key object. +/* Merge const *o* and return constant key object. + * If recursive, insert all elements if o is a tuple or frozen set. + */ static PyObject* -merge_consts_recursive(PyObject *const_cache, PyObject *o) +const_cache_insert(PyObject *const_cache, PyObject *o, bool recursive) { assert(PyDict_CheckExact(const_cache)); // None and Ellipsis are immortal objects, and key is the singleton. @@ -763,6 +727,10 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) } Py_DECREF(t); + if (!recursive) { + return key; + } + // We registered o in const_cache. // When o is a tuple or frozenset, we want to merge its // items too. 
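/*
 * Editor's note (illustrative, not part of the patch): the observable effect
 * of the constant cache maintained by const_cache_insert() is that equal
 * constants compile to the same object.  Snippet assumes an already
 * initialized interpreter.
 */
PyRun_SimpleString(
    "ns = {}\n"
    "exec(compile('a = (1, 2)\\nb = (1, 2)', '<demo>', 'exec'), ns)\n"
    "assert ns['a'] is ns['b']   # both loads share one merged constant\n");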
@@ -770,7 +738,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) Py_ssize_t len = PyTuple_GET_SIZE(o); for (Py_ssize_t i = 0; i < len; i++) { PyObject *item = PyTuple_GET_ITEM(o, i); - PyObject *u = merge_consts_recursive(const_cache, item); + PyObject *u = const_cache_insert(const_cache, item, recursive); if (u == NULL) { Py_DECREF(key); return NULL; @@ -812,7 +780,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) PyObject *item; Py_hash_t hash; while (_PySet_NextEntry(o, &pos, &item, &hash)) { - PyObject *k = merge_consts_recursive(const_cache, item); + PyObject *k = const_cache_insert(const_cache, item, recursive); if (k == NULL) { Py_DECREF(tuple); Py_DECREF(key); @@ -846,39 +814,44 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) return key; } +static PyObject* +merge_consts_recursive(PyObject *const_cache, PyObject *o) +{ + return const_cache_insert(const_cache, o, true); +} + static Py_ssize_t -compiler_add_const(PyObject *const_cache, struct compiler_unit *u, PyObject *o) +compiler_add_const(struct compiler *c, PyObject *o) { - assert(PyDict_CheckExact(const_cache)); - PyObject *key = merge_consts_recursive(const_cache, o); + PyObject *key = merge_consts_recursive(c->c_const_cache, o); if (key == NULL) { return ERROR; } - Py_ssize_t arg = dict_add_o(u->u_metadata.u_consts, key); + Py_ssize_t arg = dict_add_o(c->u->u_metadata.u_consts, key); Py_DECREF(key); return arg; } static int -compiler_addop_load_const(PyObject *const_cache, struct compiler_unit *u, location loc, PyObject *o) +compiler_addop_load_const(struct compiler *c, location loc, PyObject *o) { - Py_ssize_t arg = compiler_add_const(const_cache, u, o); + Py_ssize_t arg = compiler_add_const(c, o); if (arg < 0) { return ERROR; } - return codegen_addop_i(u->u_instr_sequence, LOAD_CONST, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), LOAD_CONST, arg, loc); } static int -compiler_addop_o(struct compiler_unit *u, location loc, +compiler_addop_o(struct compiler *c, location loc, int opcode, PyObject *dict, PyObject *o) { Py_ssize_t arg = dict_add_o(dict, o); if (arg < 0) { return ERROR; } - return codegen_addop_i(u->u_instr_sequence, opcode, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), opcode, arg, loc); } #define LOAD_METHOD -1 @@ -887,10 +860,10 @@ compiler_addop_o(struct compiler_unit *u, location loc, #define LOAD_ZERO_SUPER_METHOD -4 static int -compiler_addop_name(struct compiler_unit *u, location loc, +compiler_addop_name(struct compiler *c, location loc, int opcode, PyObject *dict, PyObject *o) { - PyObject *mangled = _Py_MaybeMangle(u->u_private, u->u_ste, o); + PyObject *mangled = compiler_maybe_mangle(c, o); if (!mangled) { return ERROR; } @@ -925,7 +898,7 @@ compiler_addop_name(struct compiler_unit *u, location loc, arg <<= 2; arg |= 1; } - return codegen_addop_i(u->u_instr_sequence, opcode, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), opcode, arg, loc); } /* Add an opcode with an integer argument */ @@ -968,7 +941,7 @@ codegen_addop_j(instr_sequence *seq, location loc, #define ADDOP_IN_SCOPE(C, LOC, OP) RETURN_IF_ERROR_IN_SCOPE((C), codegen_addop_noarg(INSTR_SEQUENCE(C), (OP), (LOC))) #define ADDOP_LOAD_CONST(C, LOC, O) \ - RETURN_IF_ERROR(compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), (O))) + RETURN_IF_ERROR(compiler_addop_load_const((C), (LOC), (O))) /* Same as ADDOP_LOAD_CONST, but steals a reference. 
*/ #define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \ @@ -976,7 +949,7 @@ codegen_addop_j(instr_sequence *seq, location loc, if (__new_const == NULL) { \ return ERROR; \ } \ - if (compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), __new_const) < 0) { \ + if (compiler_addop_load_const((C), (LOC), __new_const) < 0) { \ Py_DECREF(__new_const); \ return ERROR; \ } \ @@ -985,7 +958,7 @@ codegen_addop_j(instr_sequence *seq, location loc, #define ADDOP_N(C, LOC, OP, O, TYPE) { \ assert(!OPCODE_HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \ - if (compiler_addop_o((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \ + if (compiler_addop_o((C), (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \ Py_DECREF((O)); \ return ERROR; \ } \ @@ -993,7 +966,7 @@ codegen_addop_j(instr_sequence *seq, location loc, } #define ADDOP_NAME(C, LOC, OP, O, TYPE) \ - RETURN_IF_ERROR(compiler_addop_name((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O))) + RETURN_IF_ERROR(compiler_addop_name((C), (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O))) #define ADDOP_I(C, LOC, OP, O) \ RETURN_IF_ERROR(codegen_addop_i(INSTR_SEQUENCE(C), (OP), (O), (LOC))) @@ -1052,8 +1025,8 @@ codegen_addop_j(instr_sequence *seq, location loc, static int -compiler_enter_scope(struct compiler *c, identifier name, - int scope_type, void *key, int lineno) +compiler_enter_scope(struct compiler *c, identifier name, int scope_type, + void *key, int lineno, PyObject *private) { location loc = LOCATION(lineno, lineno, 0, 0); @@ -1132,7 +1105,6 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } - u->u_private = NULL; u->u_deferred_annotations = NULL; if (scope_type == COMPILER_SCOPE_CLASS) { u->u_static_attributes = PySet_New(0); @@ -1146,6 +1118,10 @@ compiler_enter_scope(struct compiler *c, identifier name, } u->u_instr_sequence = (instr_sequence*)_PyInstructionSequence_New(); + if (!u->u_instr_sequence) { + compiler_unit_free(u); + return ERROR; + } /* Push the old compiler_unit on the stack. */ if (c->u) { @@ -1156,8 +1132,13 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } Py_DECREF(capsule); - u->u_private = Py_XNewRef(c->u->u_private); + if (private == NULL) { + private = c->u->u_private; + } } + + u->u_private = Py_XNewRef(private); + c->u = u; c->c_nestlevel++; @@ -1436,7 +1417,7 @@ compiler_setup_annotations_scope(struct compiler *c, location loc, void *key, PyObject *name) { if (compiler_enter_scope(c, name, COMPILER_SCOPE_ANNOTATIONS, - key, loc.lineno) == -1) { + key, loc.lineno, NULL) == -1) { return ERROR; } c->u->u_metadata.u_posonlyargcount = 1; @@ -1490,7 +1471,7 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) /* If from __future__ import annotations is active, * every annotated class and module should have __annotations__. * Else __annotate__ is created when necessary. */ - if ((c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) && c->u->u_ste->ste_annotations_used) { + if ((FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) && SYMTABLE_ENTRY(c)->ste_annotations_used) { ADDOP(c, loc, SETUP_ANNOTATIONS); } if (!asdl_seq_LEN(stmts)) { @@ -1522,7 +1503,7 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) // If there are annotations and the future import is not on, we // collect the annotations in a separate pass and generate an // __annotate__ function. See PEP 649. 
- if (!(c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) && + if (!(FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) && c->u->u_deferred_annotations != NULL) { // It's possible that ste_annotations_block is set but @@ -1530,11 +1511,12 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) // set if there are only non-simple annotations (i.e., annotations // for attributes, subscripts, or parenthesized names). However, the // reverse should not be possible. - assert(c->u->u_ste->ste_annotation_block != NULL); + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + assert(ste->ste_annotation_block != NULL); PyObject *deferred_anno = Py_NewRef(c->u->u_deferred_annotations); - void *key = (void *)((uintptr_t)c->u->u_ste->ste_id + 1); + void *key = (void *)((uintptr_t)ste->ste_id + 1); if (compiler_setup_annotations_scope(c, loc, key, - c->u->u_ste->ste_annotation_block->ste_name) == -1) { + ste->ste_annotation_block->ste_name) == -1) { Py_DECREF(deferred_anno); return ERROR; } @@ -1597,7 +1579,7 @@ compiler_enter_anonymous_scope(struct compiler* c, mod_ty mod) _Py_DECLARE_STR(anon_module, ""); RETURN_IF_ERROR( compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE, - mod, 1)); + mod, 1, NULL)); return SUCCESS; } @@ -1618,13 +1600,8 @@ compiler_mod(struct compiler *c, mod_ty mod) return co; } -/* The test for LOCAL must come before the test for FREE in order to - handle classes where name is both local and free. The local var is - a method and the free var is a free var referenced within a method. -*/ - static int -get_ref_type(struct compiler *c, PyObject *name) +compiler_get_ref_type(struct compiler *c, PyObject *name) { int scope; if (c->u->u_scope_type == COMPILER_SCOPE_CLASS && @@ -1632,15 +1609,16 @@ get_ref_type(struct compiler *c, PyObject *name) _PyUnicode_EqualToASCIIString(name, "__classdict__"))) { return CELL; } - scope = _PyST_GetScope(c->u->u_ste, name); + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + scope = _PyST_GetScope(ste, name); if (scope == 0) { PyErr_Format(PyExc_SystemError, "_PyST_GetScope(name=%R) failed: " "unknown scope in unit %S (%R); " "symbols: %R; locals: %R; globals: %R", name, - c->u->u_metadata.u_name, c->u->u_ste->ste_id, - c->u->u_ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names); + c->u->u_metadata.u_name, ste->ste_id, + ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names); return ERROR; } return scope; @@ -1674,7 +1652,7 @@ compiler_make_closure(struct compiler *c, location loc, class. It should be handled by the closure, as well as by the normal name lookup logic. 
*/ - int reftype = get_ref_type(c, name); + int reftype = compiler_get_ref_type(c, name); if (reftype == -1) { return ERROR; } @@ -1770,7 +1748,7 @@ compiler_kwonlydefaults(struct compiler *c, location loc, arg_ty arg = asdl_seq_GET(kwonlyargs, i); expr_ty default_ = asdl_seq_GET(kw_defaults, i); if (default_) { - PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, arg->arg); + PyObject *mangled = compiler_maybe_mangle(c, arg->arg); if (!mangled) { goto error; } @@ -1827,14 +1805,14 @@ compiler_argannotation(struct compiler *c, identifier id, if (!annotation) { return SUCCESS; } - PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, id); + PyObject *mangled = compiler_maybe_mangle(c, id); if (!mangled) { return ERROR; } ADDOP_LOAD_CONST(c, loc, mangled); Py_DECREF(mangled); - if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { + if (FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, annotation); } else { @@ -1916,7 +1894,7 @@ compiler_annotations(struct compiler *c, location loc, Py_ssize_t annotations_len = 0; PySTEntryObject *ste; - if (_PySymtable_LookupOptional(c->c_st, args, &ste) < 0) { + if (_PySymtable_LookupOptional(SYMTABLE(c), args, &ste) < 0) { return ERROR; } assert(ste != NULL); @@ -2052,7 +2030,7 @@ compiler_type_param_bound_or_default(struct compiler *c, expr_ty e, bool allow_starred) { if (compiler_enter_scope(c, name, COMPILER_SCOPE_ANNOTATIONS, - key, e->lineno) == -1) { + key, e->lineno, NULL) == -1) { return ERROR; } if (allow_starred && e->kind == Starred_kind) { @@ -2197,7 +2175,7 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f } RETURN_IF_ERROR( - compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)); + compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno, NULL)); Py_ssize_t first_instr = 0; PyObject *docstring = _PyAST_GetDocString(body); @@ -2215,7 +2193,7 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f docstring = NULL; } } - if (compiler_add_const(c->c_const_cache, c->u, docstring ? docstring : Py_None) < 0) { + if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) { Py_XDECREF(docstring); compiler_exit_scope(c); return ERROR; @@ -2228,7 +2206,8 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f NEW_JUMP_TARGET_LABEL(c, start); USE_LABEL(c, start); - bool add_stopiteration_handler = c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator; + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + bool add_stopiteration_handler = ste->ste_coroutine || ste->ste_generator; if (add_stopiteration_handler) { /* wrap_in_stopiteration_handler will push a block, so we need to account for that */ RETURN_IF_ERROR( @@ -2324,7 +2303,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, firstlineno) == -1) { + (void *)type_params, firstlineno, NULL) == -1) { Py_DECREF(type_params_name); return ERROR; } @@ -2407,12 +2386,10 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) /* 1. 
compile the class body into a code object */ RETURN_IF_ERROR( - compiler_enter_scope(c, s->v.ClassDef.name, - COMPILER_SCOPE_CLASS, (void *)s, firstlineno)); + compiler_enter_scope(c, s->v.ClassDef.name, COMPILER_SCOPE_CLASS, + (void *)s, firstlineno, s->v.ClassDef.name)); location loc = LOCATION(firstlineno, firstlineno, 0, 0); - /* use the class name for name mangling */ - Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); /* load (global) __name__ ... */ if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) { compiler_exit_scope(c); @@ -2441,14 +2418,14 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) return ERROR; } } - if (c->u->u_ste->ste_needs_classdict) { + if (SYMTABLE_ENTRY(c)->ste_needs_classdict) { ADDOP(c, loc, LOAD_LOCALS); // We can't use compiler_nameop here because we need to generate a // STORE_DEREF in a class namespace, and compiler_nameop() won't do // that by default. PyObject *cellvars = c->u->u_metadata.u_cellvars; - if (compiler_addop_o(c->u, loc, STORE_DEREF, cellvars, + if (compiler_addop_o(c, loc, STORE_DEREF, cellvars, &_Py_ID(__classdict__)) < 0) { compiler_exit_scope(c); return ERROR; @@ -2473,7 +2450,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) } /* The following code is artificial */ /* Set __classdictcell__ if necessary */ - if (c->u->u_ste->ste_needs_classdict) { + if (SYMTABLE_ENTRY(c)->ste_needs_classdict) { /* Store __classdictcell__ into class namespace */ int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__classdict__)); if (i < 0) { @@ -2487,7 +2464,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) } } /* Return __classcell__ if it is referenced, otherwise return None */ - if (c->u->u_ste->ste_needs_class_closure) { + if (SYMTABLE_ENTRY(c)->ste_needs_class_closure) { /* Store __classcell__ into class namespace & return it */ int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__)); if (i < 0) { @@ -2558,12 +2535,11 @@ compiler_class(struct compiler *c, stmt_ty s) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, firstlineno) == -1) { + (void *)type_params, firstlineno, s->v.ClassDef.name) == -1) { Py_DECREF(type_params_name); return ERROR; } Py_DECREF(type_params_name); - Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params)); _Py_DECLARE_STR(type_params, ".type_params"); RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(type_params), Store)); @@ -2643,10 +2619,10 @@ compiler_typealias_body(struct compiler *c, stmt_ty s) location loc = LOC(s); PyObject *name = s->v.TypeAlias.name->v.Name.id; RETURN_IF_ERROR( - compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno)); + compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno, NULL)); /* Make None the first constant, so the evaluate function can't have a docstring. 
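/*
 * Editor's note (illustrative, not part of the patch): compiler_class_body()
 * above now passes the class name through the new `private` parameter of
 * compiler_enter_scope() instead of patching u_private afterwards; the
 * Python-visible result is the usual private-name mangling.  Snippet assumes
 * an already initialized interpreter.
 */
PyRun_SimpleString(
    "class C:\n"
    "    __x = 1                 # stored as _C__x\n"
    "    def get(self):\n"
    "        return self.__x     # also compiled as _C__x\n"
    "assert '_C__x' in vars(C)\n"
    "assert C().get() == 1\n");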
*/ - RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None)); + RETURN_IF_ERROR(compiler_add_const(c, Py_None)); VISIT_IN_SCOPE(c, expr, s->v.TypeAlias.value); ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); PyCodeObject *co = optimize_and_assemble(c, 0); @@ -2678,13 +2654,13 @@ compiler_typealias(struct compiler *c, stmt_ty s) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, loc.lineno) == -1) { + (void *)type_params, loc.lineno, NULL) == -1) { Py_DECREF(type_params_name); return ERROR; } Py_DECREF(type_params_name); RETURN_IF_ERROR_IN_SCOPE( - c, compiler_addop_load_const(c->c_const_cache, c->u, loc, name) + c, compiler_addop_load_const(c, loc, name) ); RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params)); } @@ -2947,17 +2923,17 @@ compiler_lambda(struct compiler *c, expr_ty e) _Py_DECLARE_STR(anon_lambda, ""); RETURN_IF_ERROR( compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA, - (void *)e, e->lineno)); + (void *)e, e->lineno, NULL)); /* Make None the first constant, so the lambda can't have a docstring. */ - RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None)); + RETURN_IF_ERROR(compiler_add_const(c, Py_None)); c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args); c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); VISIT_IN_SCOPE(c, expr, e->v.Lambda.body); - if (c->u->u_ste->ste_generator) { + if (SYMTABLE_ENTRY(c)->ste_generator) { co = optimize_and_assemble(c, 0); } else { @@ -3058,11 +3034,6 @@ static int compiler_async_for(struct compiler *c, stmt_ty s) { location loc = LOC(s); - if (IS_TOP_LEVEL_AWAIT(c)){ - assert(c->u->u_ste->ste_coroutine == 1); - } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION) { - return compiler_error(c, loc, "'async for' outside async function"); - } NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, except); @@ -3138,12 +3109,12 @@ compiler_return(struct compiler *c, stmt_ty s) location loc = LOC(s); int preserve_tos = ((s->v.Return.value != NULL) && (s->v.Return.value->kind != Constant_kind)); - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + if (!_PyST_IsFunctionLike(ste)) { return compiler_error(c, loc, "'return' outside function"); } - if (s->v.Return.value != NULL && - c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator) - { + if (s->v.Return.value != NULL && ste->ste_coroutine && ste->ste_generator) { return compiler_error(c, loc, "'return' with value in async generator"); } @@ -4093,7 +4064,8 @@ addop_binary(struct compiler *c, location loc, operator_ty binop, static int addop_yield(struct compiler *c, location loc) { - if (c->u->u_ste->ste_generator && c->u->u_ste->ste_coroutine) { + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + if (ste->ste_generator && ste->ste_coroutine) { ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_ASYNC_GEN_WRAP); } ADDOP_I(c, loc, YIELD_VALUE, 0); @@ -4120,14 +4092,14 @@ compiler_nameop(struct compiler *c, location loc, return ERROR; } - mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, name); + mangled = compiler_maybe_mangle(c, name); if (!mangled) { return ERROR; } op = 0; optype = OP_NAME; - scope = _PyST_GetScope(c->u->u_ste, mangled); + scope = _PyST_GetScope(SYMTABLE_ENTRY(c), mangled); switch (scope) { case FREE: dict = c->u->u_metadata.u_freevars; @@ -4138,7 +4110,7 @@ compiler_nameop(struct compiler *c, location loc, optype = OP_DEREF; break; 
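/*
 * Editor's note (illustrative, not part of the patch): the 'async for'
 * outside-async-function check removed from compiler_async_for() above is not
 * dropped; the new messages added to Python/symtable.c near the end of this
 * diff suggest the check now runs during the symbol table pass, so the error
 * is still a SyntaxError.  Snippet assumes an already initialized interpreter.
 */
PyRun_SimpleString(
    "src = 'async for x in y:\\n    pass\\n'\n"
    "try:\n"
    "    compile(src, '<demo>', 'exec')\n"
    "except SyntaxError as exc:\n"
    "    assert 'async for' in str(exc)\n"
    "else:\n"
    "    raise AssertionError('expected SyntaxError')\n");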
case LOCAL: - if (_PyST_IsFunctionLike(c->u->u_ste)) { + if (_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { optype = OP_FAST; } else { @@ -4154,7 +4126,7 @@ compiler_nameop(struct compiler *c, location loc, } break; case GLOBAL_IMPLICIT: - if (_PyST_IsFunctionLike(c->u->u_ste)) + if (_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) optype = OP_GLOBAL; break; case GLOBAL_EXPLICIT: @@ -4172,17 +4144,17 @@ compiler_nameop(struct compiler *c, location loc, case OP_DEREF: switch (ctx) { case Load: - if (c->u->u_ste->ste_type == ClassBlock && !c->u->u_in_inlined_comp) { + if (SYMTABLE_ENTRY(c)->ste_type == ClassBlock && !c->u->u_in_inlined_comp) { op = LOAD_FROM_DICT_OR_DEREF; // First load the locals if (codegen_addop_noarg(INSTR_SEQUENCE(c), LOAD_LOCALS, loc) < 0) { goto error; } } - else if (c->u->u_ste->ste_can_see_class_scope) { + else if (SYMTABLE_ENTRY(c)->ste_can_see_class_scope) { op = LOAD_FROM_DICT_OR_DEREF; // First load the classdict - if (compiler_addop_o(c->u, loc, LOAD_DEREF, + if (compiler_addop_o(c, loc, LOAD_DEREF, c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) { goto error; } @@ -4206,10 +4178,10 @@ compiler_nameop(struct compiler *c, location loc, case OP_GLOBAL: switch (ctx) { case Load: - if (c->u->u_ste->ste_can_see_class_scope && scope == GLOBAL_IMPLICIT) { + if (SYMTABLE_ENTRY(c)->ste_can_see_class_scope && scope == GLOBAL_IMPLICIT) { op = LOAD_FROM_DICT_OR_GLOBALS; // First load the classdict - if (compiler_addop_o(c->u, loc, LOAD_DEREF, + if (compiler_addop_o(c, loc, LOAD_DEREF, c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) { goto error; } @@ -4224,7 +4196,7 @@ compiler_nameop(struct compiler *c, location loc, case OP_NAME: switch (ctx) { case Load: - op = (c->u->u_ste->ste_type == ClassBlock + op = (SYMTABLE_ENTRY(c)->ste_type == ClassBlock && c->u->u_in_inlined_comp) ? 
LOAD_GLOBAL : LOAD_NAME; @@ -4729,7 +4701,7 @@ is_import_originated(struct compiler *c, expr_ty e) return 0; } - long flags = _PyST_GetSymbol(c->c_st->st_top, e->v.Name.id); + long flags = _PyST_GetSymbol(SYMTABLE(c)->st_top, e->v.Name.id); return flags & DEF_IMPORT; } @@ -4748,11 +4720,11 @@ can_optimize_super_call(struct compiler *c, expr_ty attr) PyObject *super_name = e->v.Call.func->v.Name.id; // detect statically-visible shadowing of 'super' name - int scope = _PyST_GetScope(c->u->u_ste, super_name); + int scope = _PyST_GetScope(SYMTABLE_ENTRY(c), super_name); if (scope != GLOBAL_IMPLICIT) { return 0; } - scope = _PyST_GetScope(c->c_st->st_top, super_name); + scope = _PyST_GetScope(SYMTABLE(c)->st_top, super_name); if (scope != 0) { return 0; } @@ -4780,7 +4752,7 @@ can_optimize_super_call(struct compiler *c, expr_ty attr) return 0; } // __class__ cell should be available - if (get_ref_type(c, &_Py_ID(__class__)) == FREE) { + if (compiler_get_ref_type(c, &_Py_ID(__class__)) == FREE) { return 1; } return 0; @@ -4802,7 +4774,7 @@ load_args_for_super(struct compiler *c, expr_ty e) { // load __class__ cell PyObject *name = &_Py_ID(__class__); - assert(get_ref_type(c, name) == FREE); + assert(compiler_get_ref_type(c, name) == FREE); RETURN_IF_ERROR(compiler_nameop(c, loc, name, Load)); // load self (first argument) @@ -5474,7 +5446,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, PySTEntryObject *entry, inlined_comprehension_state *state) { - int in_class_block = (c->u->u_ste->ste_type == ClassBlock) && !c->u->u_in_inlined_comp; + int in_class_block = (SYMTABLE_ENTRY(c)->ste_type == ClassBlock) && !c->u->u_in_inlined_comp; c->u->u_in_inlined_comp++; // iterate over names bound in the comprehension and ensure we isolate // them from the outer scope as needed @@ -5484,7 +5456,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, assert(PyLong_Check(v)); long symbol = PyLong_AS_LONG(v); long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK; - PyObject *outv = PyDict_GetItemWithError(c->u->u_ste->ste_symbols, k); + PyObject *outv = PyDict_GetItemWithError(SYMTABLE_ENTRY(c)->ste_symbols, k); if (outv == NULL) { if (PyErr_Occurred()) { return ERROR; @@ -5513,7 +5485,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, // the outer version; we'll restore it after running the // comprehension Py_INCREF(outv); - if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v) < 0) { + if (PyDict_SetItem(SYMTABLE_ENTRY(c)->ste_symbols, k, v) < 0) { Py_DECREF(outv); return ERROR; } @@ -5526,7 +5498,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, // locals handling for names bound in comprehension (DEF_LOCAL | // DEF_NONLOCAL occurs in assignment expression to nonlocal) if ((symbol & DEF_LOCAL && !(symbol & DEF_NONLOCAL)) || in_class_block) { - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { // non-function scope: override this name to use fast locals PyObject *orig; if (PyDict_GetItemRef(c->u->u_metadata.u_fasthidden, k, &orig) < 0) { @@ -5628,7 +5600,7 @@ pop_inlined_comprehension_state(struct compiler *c, location loc, Py_ssize_t pos = 0; if (state.temp_symbols) { while (PyDict_Next(state.temp_symbols, &pos, &k, &v)) { - if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v)) { + if (PyDict_SetItem(SYMTABLE_ENTRY(c)->ste_symbols, k, v)) { return ERROR; } } @@ -5697,7 +5669,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, comprehension_ty outermost; int scope_type = 
c->u->u_scope_type; int is_top_level_await = IS_TOP_LEVEL_AWAIT(c); - PySTEntryObject *entry = _PySymtable_Lookup(c->c_st, (void *)e); + PySTEntryObject *entry = _PySymtable_Lookup(SYMTABLE(c), (void *)e); if (entry == NULL) { goto error; } @@ -5717,7 +5689,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, } else { if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, - (void *)e, e->lineno) < 0) + (void *)e, e->lineno, NULL) < 0) { goto error; } @@ -5781,9 +5753,6 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, co = optimize_and_assemble(c, 1); compiler_exit_scope(c); - if (is_top_level_await && is_async_generator){ - assert(c->u->u_ste->ste_coroutine == 1); - } if (co == NULL) { goto error; } @@ -5925,11 +5894,6 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) withitem_ty item = asdl_seq_GET(s->v.AsyncWith.items, pos); assert(s->kind == AsyncWith_kind); - if (IS_TOP_LEVEL_AWAIT(c)){ - assert(c->u->u_ste->ste_coroutine == 1); - } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION){ - return compiler_error(c, loc, "'async with' outside async function"); - } NEW_JUMP_TARGET_LABEL(c, block); NEW_JUMP_TARGET_LABEL(c, final); @@ -6142,7 +6106,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) case DictComp_kind: return compiler_dictcomp(c, e); case Yield_kind: - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'yield' outside function"); } if (e->v.Yield.value) { @@ -6154,7 +6118,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) ADDOP_YIELD(c, loc); break; case YieldFrom_kind: - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'yield' outside function"); } if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) { @@ -6167,7 +6131,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) break; case Await_kind: if (!IS_TOP_LEVEL_AWAIT(c)){ - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'await' outside function"); } @@ -6207,13 +6171,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) if (e->v.Attribute.value->kind == Name_kind && _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self")) { - struct compiler_unit *class_u = get_class_compiler_unit(c); - if (class_u != NULL) { - assert(class_u->u_scope_type == COMPILER_SCOPE_CLASS); - assert(class_u->u_static_attributes); - RETURN_IF_ERROR( - PySet_Add(class_u->u_static_attributes, e->v.Attribute.attr)); - } + RETURN_IF_ERROR(compiler_add_static_attribute_to_class(c, e->v.Attribute.attr)); } VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); @@ -6360,7 +6318,7 @@ check_annotation(struct compiler *c, stmt_ty s) { /* Annotations of complex targets does not produce anything under annotations future */ - if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { + if (FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) { return SUCCESS; } @@ -6407,7 +6365,7 @@ compiler_annassign(struct compiler *c, stmt_ty s) { location loc = LOC(s); expr_ty targ = s->v.AnnAssign.target; - bool future_annotations = c->c_future.ff_features & CO_FUTURE_ANNOTATIONS; + bool future_annotations = FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS; PyObject *mangled; assert(s->kind == AnnAssign_kind); @@ -6429,7 +6387,7 @@ compiler_annassign(struct compiler *c, stmt_ty s) if (future_annotations) { VISIT(c, annexpr, s->v.AnnAssign.annotation); ADDOP_NAME(c, loc, LOAD_NAME, 
&_Py_ID(__annotations__), names); - mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, targ->v.Name.id); + mangled = compiler_maybe_mangle(c, targ->v.Name.id); ADDOP_LOAD_CONST_NEW(c, loc, mangled); ADDOP(c, loc, STORE_SUBSCR); } @@ -7471,19 +7429,47 @@ consts_dict_keys_inorder(PyObject *dict) return consts; } +static PyObject * +compiler_maybe_mangle(struct compiler *c, PyObject *name) +{ + return _Py_MaybeMangle(c->u->u_private, c->u->u_ste, name); +} + +static instr_sequence * +compiler_instr_sequence(struct compiler *c) +{ + return c->u->u_instr_sequence; +} + +static int +compiler_future_features(struct compiler *c) +{ + return c->c_future.ff_features; +} + +static struct symtable * +compiler_symtable(struct compiler *c) +{ + return c->c_st; +} + +static PySTEntryObject * +compiler_symtable_entry(struct compiler *c) +{ + return c->u->u_ste; +} + static int compute_code_flags(struct compiler *c) { - PySTEntryObject *ste = c->u->u_ste; + PySTEntryObject *ste = SYMTABLE_ENTRY(c); int flags = 0; - if (_PyST_IsFunctionLike(c->u->u_ste)) { + if (_PyST_IsFunctionLike(ste)) { flags |= CO_NEWLOCALS | CO_OPTIMIZED; if (ste->ste_nested) flags |= CO_NESTED; if (ste->ste_generator && !ste->ste_coroutine) flags |= CO_GENERATOR; - if (!ste->ste_generator && ste->ste_coroutine) - flags |= CO_COROUTINE; if (ste->ste_generator && ste->ste_coroutine) flags |= CO_ASYNC_GENERATOR; if (ste->ste_varargs) @@ -7492,49 +7478,33 @@ compute_code_flags(struct compiler *c) flags |= CO_VARKEYWORDS; } - /* (Only) inherit compilerflags in PyCF_MASK */ - flags |= (c->c_flags.cf_flags & PyCF_MASK); - - if ((IS_TOP_LEVEL_AWAIT(c)) && - ste->ste_coroutine && - !ste->ste_generator) { + if (ste->ste_coroutine && !ste->ste_generator) { + assert (IS_TOP_LEVEL_AWAIT(c) || _PyST_IsFunctionLike(ste)); flags |= CO_COROUTINE; } + /* (Only) inherit compilerflags in PyCF_MASK */ + flags |= (c->c_flags.cf_flags & PyCF_MASK); + return flags; } -// Merge *obj* with constant cache. -// Unlike merge_consts_recursive(), this function doesn't work recursively. +// Merge *obj* with constant cache, without recursion. int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj) { - assert(PyDict_CheckExact(const_cache)); - PyObject *key = _PyCode_ConstantKey(*obj); + PyObject *key = const_cache_insert(const_cache, *obj, false); if (key == NULL) { return ERROR; } - - PyObject *t; - int res = PyDict_SetDefaultRef(const_cache, key, key, &t); - Py_DECREF(key); - if (res < 0) { - return ERROR; - } - if (res == 0) { // inserted: obj is new constant. 
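/*
 * Editor's note (illustrative, not part of the patch): the simplified branch
 * in compute_code_flags() above sets CO_COROUTINE for any non-generator
 * coroutine scope; the flag is visible on code objects.  Snippet assumes an
 * already initialized interpreter.
 */
PyRun_SimpleString(
    "import inspect\n"
    "async def coro(): pass\n"
    "assert coro.__code__.co_flags & inspect.CO_COROUTINE\n"
    "def gen(): yield\n"
    "assert not gen.__code__.co_flags & inspect.CO_COROUTINE\n");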
- Py_DECREF(t); - return SUCCESS; - } - - if (PyTuple_CheckExact(t)) { - PyObject *item = PyTuple_GET_ITEM(t, 1); + if (PyTuple_CheckExact(key)) { + PyObject *item = PyTuple_GET_ITEM(key, 1); Py_SETREF(*obj, Py_NewRef(item)); - Py_DECREF(t); + Py_DECREF(key); } else { - Py_SETREF(*obj, t); + Py_SETREF(*obj, key); } - return SUCCESS; } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 3b999465aac815..8f6bc75b528d9b 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -3055,8 +3055,11 @@ JUMP_TO_JUMP_TARGET(); } if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + it->it_index = -1; + if (1) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } break; } diff --git a/Python/import.c b/Python/import.c index 20ad10020044df..40b7feac001d6e 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1532,6 +1532,35 @@ switch_to_main_interpreter(PyThreadState *tstate) return main_tstate; } +static void +switch_back_from_main_interpreter(PyThreadState *tstate, + PyThreadState *main_tstate, + PyObject *tempobj) +{ + assert(main_tstate == PyThreadState_GET()); + assert(_Py_IsMainInterpreter(main_tstate->interp)); + assert(tstate->interp != main_tstate->interp); + + /* Handle any exceptions, which we cannot propagate directly + * to the subinterpreter. */ + if (PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + /* We trust it will be caught again soon. */ + PyErr_Clear(); + } + else { + /* Printing the exception should be sufficient. */ + PyErr_PrintEx(0); + } + } + + Py_XDECREF(tempobj); + + PyThreadState_Clear(main_tstate); + (void)PyThreadState_Swap(tstate); + PyThreadState_Delete(main_tstate); +} + static PyObject * get_core_module_dict(PyInterpreterState *interp, PyObject *name, PyObject *path) @@ -2027,27 +2056,10 @@ import_run_extension(PyThreadState *tstate, PyModInitFunction p0, /* Switch back to the subinterpreter. */ if (switched) { assert(main_tstate != tstate); - - /* Handle any exceptions, which we cannot propagate directly - * to the subinterpreter. */ - if (PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - /* We trust it will be caught again soon. */ - PyErr_Clear(); - } - else { - /* Printing the exception should be sufficient. */ - PyErr_PrintEx(0); - } - } - + switch_back_from_main_interpreter(tstate, main_tstate, mod); /* Any module we got from the init function will have to be * reloaded in the subinterpreter. */ - Py_CLEAR(mod); - - PyThreadState_Clear(main_tstate); - (void)PyThreadState_Swap(tstate); - PyThreadState_Delete(main_tstate); + mod = NULL; } /*****************************************************************/ @@ -2141,9 +2153,21 @@ clear_singlephase_extension(PyInterpreterState *interp, } } + /* We must use the main interpreter to clean up the cache. + * See the note in import_run_extension(). */ + PyThreadState *tstate = PyThreadState_GET(); + PyThreadState *main_tstate = switch_to_main_interpreter(tstate); + if (main_tstate == NULL) { + return -1; + } + /* Clear the cached module def. 
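/* [Editor's sketch -- not part of the patch] switch_back_from_main_interpreter()
 * pairs with the pre-existing switch_to_main_interpreter(); the calling pattern
 * the import code converges on looks roughly like this, where do_work_in_main()
 * is a hypothetical placeholder for the single-phase-init work: */
PyThreadState *tstate = PyThreadState_GET();
PyThreadState *main_tstate = switch_to_main_interpreter(tstate);
if (main_tstate == NULL) {
    return -1;
}
PyObject *result = do_work_in_main();          /* hypothetical */
if (main_tstate != tstate) {
    /* Reports or clears any pending exception (it cannot cross interpreters),
       releases the temporary object, and swaps back to the subinterpreter. */
    switch_back_from_main_interpreter(tstate, main_tstate, result);
    result = NULL;   /* anything created under the main interpreter must be redone */
}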
*/ _extensions_cache_delete(path, name); + if (main_tstate != tstate) { + switch_back_from_main_interpreter(tstate, main_tstate, NULL); + } + return 0; } diff --git a/Python/lock.c b/Python/lock.c index 7c6a5175e88ff1..57675fe1873fa2 100644 --- a/Python/lock.c +++ b/Python/lock.c @@ -514,6 +514,7 @@ void _PySeqLock_LockWrite(_PySeqLock *seqlock) } else if (_Py_atomic_compare_exchange_uint32(&seqlock->sequence, &prev, prev + 1)) { // We've locked the cache + _Py_atomic_fence_release(); break; } else { @@ -547,28 +548,31 @@ uint32_t _PySeqLock_BeginRead(_PySeqLock *seqlock) return sequence; } -uint32_t _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) +int _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) { - // Synchronize again and validate that the entry hasn't been updated - // while we were readying the values. - if (_Py_atomic_load_uint32_acquire(&seqlock->sequence) == previous) { + // gh-121368: We need an explicit acquire fence here to ensure that + // this load of the sequence number is not reordered before any loads + // within the read lock. + _Py_atomic_fence_acquire(); + + if (_Py_atomic_load_uint32_relaxed(&seqlock->sequence) == previous) { return 1; - } + } - _Py_yield(); - return 0; + _Py_yield(); + return 0; } -uint32_t _PySeqLock_AfterFork(_PySeqLock *seqlock) +int _PySeqLock_AfterFork(_PySeqLock *seqlock) { // Synchronize again and validate that the entry hasn't been updated // while we were readying the values. - if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { + if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { seqlock->sequence = 0; return 1; - } + } - return 0; + return 0; } #undef PyMutex_Lock diff --git a/Python/symtable.c b/Python/symtable.c index 65677f86092b0b..10103dbc2582a2 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -70,17 +70,21 @@ #define DUPLICATE_TYPE_PARAM \ "duplicate type parameter '%U'" +#define ASYNC_WITH_OUTISDE_ASYNC_FUNC \ +"'async with' outside async function" -#define LOCATION(x) \ - (x)->lineno, (x)->col_offset, (x)->end_lineno, (x)->end_col_offset +#define ASYNC_FOR_OUTISDE_ASYNC_FUNC \ +"'async for' outside async function" -#define ST_LOCATION(x) \ - (x)->ste_lineno, (x)->ste_col_offset, (x)->ste_end_lineno, (x)->ste_end_col_offset +#define LOCATION(x) SRC_LOCATION_FROM_AST(x) + +#define SET_ERROR_LOCATION(FNAME, L) \ + PyErr_RangedSyntaxLocationObject((FNAME), \ + (L).lineno, (L).col_offset + 1, (L).end_lineno, (L).end_col_offset + 1) static PySTEntryObject * ste_new(struct symtable *st, identifier name, _Py_block_ty block, - void *key, int lineno, int col_offset, - int end_lineno, int end_col_offset) + void *key, _Py_SourceLocation loc) { PySTEntryObject *ste = NULL; PyObject *k = NULL; @@ -112,13 +116,8 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_free = 0; ste->ste_varargs = 0; ste->ste_varkeywords = 0; - ste->ste_opt_lineno = 0; - ste->ste_opt_col_offset = 0; ste->ste_annotations_used = 0; - ste->ste_lineno = lineno; - ste->ste_col_offset = col_offset; - ste->ste_end_lineno = end_lineno; - ste->ste_end_col_offset = end_col_offset; + ste->ste_loc = loc; if (st->st_cur != NULL && (st->st_cur->ste_nested || @@ -158,7 +157,7 @@ static PyObject * ste_repr(PySTEntryObject *ste) { return PyUnicode_FromFormat("<symtable entry %U(%R), line %d>", - ste->ste_name, ste->ste_id, ste->ste_lineno); + ste->ste_name, ste->ste_id, ste->ste_loc.lineno); } static void @@ -186,7 +185,7 @@ static PyMemberDef ste_memberlist[] = { {"children", _Py_T_OBJECT, OFF(ste_children), Py_READONLY}, {"nested", Py_T_INT, OFF(ste_nested),
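/* [Editor's sketch -- not part of the patch] The reader side of this sequence
 * lock is a retry loop; the acquire fence added above keeps the payload loads
 * inside that loop from being reordered past the final sequence check.
 * Illustrative reader, with a hypothetical cache_entry struct standing in for
 * the type-cache data actually guarded by the lock: */
struct cache_entry {
    _PySeqLock sequence;   /* hypothetical layout, for illustration only */
    PyObject *value;
};

static PyObject *
read_cached_value(struct cache_entry *entry)
{
    PyObject *value;
    uint32_t sequence;
    do {
        sequence = _PySeqLock_BeginRead(&entry->sequence);     /* waits out an in-progress writer */
        value = entry->value;                                   /* may race with a writer ...      */
    } while (!_PySeqLock_EndRead(&entry->sequence, sequence));  /* ... so retry if the sequence moved */
    return value;
}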
Py_READONLY}, {"type", Py_T_INT, OFF(ste_type), Py_READONLY}, - {"lineno", Py_T_INT, OFF(ste_lineno), Py_READONLY}, + {"lineno", Py_T_INT, OFF(ste_loc.lineno), Py_READONLY}, {NULL} }; @@ -233,9 +232,7 @@ PyTypeObject PySTEntry_Type = { static int symtable_analyze(struct symtable *st); static int symtable_enter_block(struct symtable *st, identifier name, - _Py_block_ty block, void *ast, - int lineno, int col_offset, - int end_lineno, int end_col_offset); + _Py_block_ty block, void *ast, _Py_SourceLocation loc); static int symtable_exit_block(struct symtable *st); static int symtable_visit_stmt(struct symtable *st, stmt_ty s); static int symtable_visit_expr(struct symtable *st, expr_ty s); @@ -259,6 +256,7 @@ static int symtable_visit_withitem(struct symtable *st, withitem_ty item); static int symtable_visit_match_case(struct symtable *st, match_case_ty m); static int symtable_visit_pattern(struct symtable *st, pattern_ty s); static int symtable_raise_if_annotation_block(struct symtable *st, const char *, expr_ty); +static int symtable_raise_if_not_coroutine(struct symtable *st, const char *msg, _Py_SourceLocation loc); static int symtable_raise_if_comprehension_block(struct symtable *st, expr_ty); /* For debugging purposes only */ @@ -311,8 +309,8 @@ static void _dump_symtable(PySTEntryObject* ste, PyObject* prefix) ste->ste_comp_iter_target ? " comp_iter_target" : "", ste->ste_can_see_class_scope ? " can_see_class_scope" : "", prefix, - ste->ste_lineno, - ste->ste_col_offset, + ste->ste_loc.lineno, + ste->ste_loc.col_offset, prefix ); assert(msg != NULL); @@ -424,7 +422,9 @@ _PySymtable_Build(mod_ty mod, PyObject *filename, _PyFutureFeatures *future) st->recursion_limit = Py_C_RECURSION_LIMIT; /* Make the initial symbol information gathering pass */ - if (!symtable_enter_block(st, &_Py_ID(top), ModuleBlock, (void *)mod, 0, 0, 0, 0)) { + + _Py_SourceLocation loc0 = {0, 0, 0, 0}; + if (!symtable_enter_block(st, &_Py_ID(top), ModuleBlock, (void *)mod, loc0)) { _PySymtable_Free(st); return NULL; } @@ -1379,11 +1379,9 @@ symtable_enter_existing_block(struct symtable *st, PySTEntryObject* ste) static int symtable_enter_block(struct symtable *st, identifier name, _Py_block_ty block, - void *ast, int lineno, int col_offset, - int end_lineno, int end_col_offset) + void *ast, _Py_SourceLocation loc) { - PySTEntryObject *ste = ste_new(st, name, block, ast, - lineno, col_offset, end_lineno, end_col_offset); + PySTEntryObject *ste = ste_new(st, name, block, ast, loc); if (ste == NULL) return 0; int result = symtable_enter_existing_block(st, ste); @@ -1410,7 +1408,7 @@ symtable_lookup(struct symtable *st, PyObject *name) static int symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _symtable_entry *ste, - int lineno, int col_offset, int end_lineno, int end_col_offset) + _Py_SourceLocation loc) { PyObject *o; PyObject *dict; @@ -1425,16 +1423,12 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s if ((flag & DEF_PARAM) && (val & DEF_PARAM)) { /* Is it better to use 'mangled' or 'name' here? 
*/ PyErr_Format(PyExc_SyntaxError, DUPLICATE_ARGUMENT, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } if ((flag & DEF_TYPE_PARAM) && (val & DEF_TYPE_PARAM)) { PyErr_Format(PyExc_SyntaxError, DUPLICATE_TYPE_PARAM, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } val |= flag; @@ -1454,9 +1448,7 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s if (val & (DEF_GLOBAL | DEF_NONLOCAL)) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_INNER_LOOP_CONFLICT, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } val |= DEF_COMP_ITER; @@ -1501,33 +1493,28 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s } static int -symtable_add_def(struct symtable *st, PyObject *name, int flag, - int lineno, int col_offset, int end_lineno, int end_col_offset) +symtable_add_def(struct symtable *st, PyObject *name, int flag, _Py_SourceLocation loc) { if ((flag & DEF_TYPE_PARAM) && st->st_cur->ste_mangled_names != NULL) { if(PySet_Add(st->st_cur->ste_mangled_names, name) < 0) { return 0; } } - return symtable_add_def_helper(st, name, flag, st->st_cur, - lineno, col_offset, end_lineno, end_col_offset); + return symtable_add_def_helper(st, name, flag, st->st_cur, loc); } static int symtable_enter_type_param_block(struct symtable *st, identifier name, void *ast, int has_defaults, int has_kwdefaults, - enum _stmt_kind kind, - int lineno, int col_offset, - int end_lineno, int end_col_offset) + enum _stmt_kind kind, _Py_SourceLocation loc) { _Py_block_ty current_type = st->st_cur->ste_type; - if(!symtable_enter_block(st, name, TypeParametersBlock, ast, lineno, - col_offset, end_lineno, end_col_offset)) { + if(!symtable_enter_block(st, name, TypeParametersBlock, ast, loc)) { return 0; } if (current_type == ClassBlock) { st->st_cur->ste_can_see_class_scope = 1; - if (!symtable_add_def(st, &_Py_ID(__classdict__), USE, lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_ID(__classdict__), USE, loc)) { return 0; } } @@ -1535,36 +1522,30 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, _Py_DECLARE_STR(type_params, ".type_params"); // It gets "set" when we create the type params tuple and // "used" when we build up the bases. 
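/* [Editor's note] Every signature in these hunks now threads one
 * _Py_SourceLocation value instead of four separate ints. For reference, the
 * struct carries exactly the fields the SET_ERROR_LOCATION() macro above
 * unpacks; the real definition (and field order) lives in an internal pycore
 * header, not in this patch: */
typedef struct {
    int lineno;
    int end_lineno;
    int col_offset;
    int end_col_offset;
} _Py_SourceLocation;   /* LOCATION(node) builds one via SRC_LOCATION_FROM_AST(node) */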
- if (!symtable_add_def(st, &_Py_STR(type_params), DEF_LOCAL, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(type_params), DEF_LOCAL, loc)) { return 0; } - if (!symtable_add_def(st, &_Py_STR(type_params), USE, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(type_params), USE, loc)) { return 0; } // This is used for setting the generic base _Py_DECLARE_STR(generic_base, ".generic_base"); - if (!symtable_add_def(st, &_Py_STR(generic_base), DEF_LOCAL, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(generic_base), DEF_LOCAL, loc)) { return 0; } - if (!symtable_add_def(st, &_Py_STR(generic_base), USE, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(generic_base), USE, loc)) { return 0; } } if (has_defaults) { _Py_DECLARE_STR(defaults, ".defaults"); - if (!symtable_add_def(st, &_Py_STR(defaults), DEF_PARAM, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(defaults), DEF_PARAM, loc)) { return 0; } } if (has_kwdefaults) { _Py_DECLARE_STR(kwdefaults, ".kwdefaults"); - if (!symtable_add_def(st, &_Py_STR(kwdefaults), DEF_PARAM, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(kwdefaults), DEF_PARAM, loc)) { return 0; } } @@ -1627,8 +1608,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, } while(0) static int -symtable_record_directive(struct symtable *st, identifier name, int lineno, - int col_offset, int end_lineno, int end_col_offset) +symtable_record_directive(struct symtable *st, identifier name, _Py_SourceLocation loc) { PyObject *data, *mangled; int res; @@ -1640,7 +1620,8 @@ symtable_record_directive(struct symtable *st, identifier name, int lineno, mangled = _Py_MaybeMangle(st->st_private, st->st_cur, name); if (!mangled) return 0; - data = Py_BuildValue("(Niiii)", mangled, lineno, col_offset, end_lineno, end_col_offset); + data = Py_BuildValue("(Niiii)", mangled, loc.lineno, loc.col_offset, + loc.end_lineno, loc.end_col_offset); if (!data) return 0; res = PyList_Append(st->st_cur->ste_directives, data); @@ -1673,9 +1654,7 @@ check_import_from(struct symtable *st, stmt_ty s) PyErr_SetString(PyExc_SyntaxError, "from __future__ imports must occur " "at the beginning of the file"); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, s->col_offset + 1, - s->end_lineno, s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); return 0; } return 1; @@ -1772,9 +1751,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_SEQ(st, expr, s->v.ClassDef.bases); VISIT_SEQ(st, keyword, s->v.ClassDef.keywords); if (!symtable_enter_block(st, s->v.ClassDef.name, ClassBlock, - (void *)s, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + (void *)s, LOCATION(s))) { VISIT_QUIT(st, 0); + } st->st_private = s->v.ClassDef.name; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_add_def(st, &_Py_ID(__type_params__), @@ -1814,8 +1793,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_SEQ(st, type_param, s->v.TypeAlias.type_params); } if (!symtable_enter_block(st, name, TypeAliasBlock, - (void *)s, LOCATION(s))) + (void *)s, LOCATION(s))) { VISIT_QUIT(st, 0); + } st->st_cur->ste_can_see_class_scope = is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(s->v.TypeAlias.value))) { VISIT_QUIT(st, 0); @@ -1856,11 +1836,7 @@ 
symtable_visit_stmt(struct symtable *st, stmt_ty s) PyErr_Format(PyExc_SyntaxError, cur & DEF_GLOBAL ? GLOBAL_ANNOT : NONLOCAL_ANNOT, e_name->v.Name.id); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } if (s->v.AnnAssign.simple && @@ -1970,18 +1946,15 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } PyErr_Format(PyExc_SyntaxError, msg, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } - if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) + if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) { VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, name, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + } + if (!symtable_record_directive(st, name, LOCATION(s))) { VISIT_QUIT(st, 0); + } } break; } @@ -2005,18 +1978,14 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) msg = NONLOCAL_AFTER_ASSIGN; } PyErr_Format(PyExc_SyntaxError, msg, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } if (!symtable_add_def(st, name, DEF_NONLOCAL, LOCATION(s))) VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, name, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + if (!symtable_record_directive(st, name, LOCATION(s))) { VISIT_QUIT(st, 0); + } } break; } @@ -2085,11 +2054,17 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } case AsyncWith_kind: maybe_set_ste_coroutine_for_module(st, s); + if (!symtable_raise_if_not_coroutine(st, ASYNC_WITH_OUTISDE_ASYNC_FUNC, LOCATION(s))) { + VISIT_QUIT(st, 0); + } VISIT_SEQ(st, withitem, s->v.AsyncWith.items); VISIT_SEQ(st, stmt, s->v.AsyncWith.body); break; case AsyncFor_kind: maybe_set_ste_coroutine_for_module(st, s); + if (!symtable_raise_if_not_coroutine(st, ASYNC_FOR_OUTISDE_ASYNC_FUNC, LOCATION(s))) { + VISIT_QUIT(st, 0); + } VISIT(st, expr, s->v.AsyncFor.target); VISIT(st, expr, s->v.AsyncFor.iter); VISIT_SEQ(st, stmt, s->v.AsyncFor.body); @@ -2124,11 +2099,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) if ((target_in_scope & DEF_COMP_ITER) && (target_in_scope & DEF_LOCAL)) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } continue; @@ -2141,20 +2112,24 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) VISIT_QUIT(st, 0); } else { - if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) + if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) { VISIT_QUIT(st, 0); + } } - if (!symtable_record_directive(st, target_name, LOCATION(e))) + if (!symtable_record_directive(st, target_name, LOCATION(e))) { VISIT_QUIT(st, 0); + } return symtable_add_def_helper(st, target_name, DEF_LOCAL, ste, LOCATION(e)); } /* If we find a ModuleBlock entry, add as GLOBAL */ if (ste->ste_type == ModuleBlock) { - if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) + if (!symtable_add_def(st, target_name, 
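/* [Editor's sketch -- not part of the patch] The 'async with'/'async for'
 * errors dropped from compile.c earlier in this diff are re-raised here, while
 * the symbol table is built. They pass only when the current block is already
 * marked as a coroutine, which the pre-existing helper (not shown in this
 * excerpt) arranges for module blocks compiled with top-level await; roughly: */
static void
maybe_set_ste_coroutine_for_module(struct symtable *st, stmt_ty s)
{
    if ((st->st_future->ff_features & PyCF_ALLOW_TOP_LEVEL_AWAIT) &&
        st->st_cur->ste_type == ModuleBlock)
    {
        st->st_cur->ste_coroutine = 1;   /* lets top-level 'async with'/'async for' through */
    }
}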
DEF_GLOBAL, LOCATION(e))) { VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, target_name, LOCATION(e))) + } + if (!symtable_record_directive(st, target_name, LOCATION(e))) { VISIT_QUIT(st, 0); + } return symtable_add_def_helper(st, target_name, DEF_GLOBAL, ste, LOCATION(e)); } @@ -2179,11 +2154,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) default: Py_UNREACHABLE(); } - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } } @@ -2201,11 +2172,7 @@ symtable_handle_namedexpr(struct symtable *st, expr_ty e) if (st->st_cur->ste_comp_iter_expr > 0) { /* Assignment isn't allowed in a comprehension iterable expression */ PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_ITER_EXPR); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); return 0; } if (st->st_cur->ste_comprehension) { @@ -2250,10 +2217,9 @@ symtable_visit_expr(struct symtable *st, expr_ty e) if (e->v.Lambda.args->kw_defaults) VISIT_SEQ_WITH_NULL(st, expr, e->v.Lambda.args->kw_defaults); if (!symtable_enter_block(st, &_Py_ID(lambda), - FunctionBlock, (void *)e, - e->lineno, e->col_offset, - e->end_lineno, e->end_col_offset)) + FunctionBlock, (void *)e, LOCATION(e))) { VISIT_QUIT(st, 0); + } VISIT(st, arguments, e->v.Lambda.args); VISIT(st, expr, e->v.Lambda.body); if (!symtable_exit_block(st)) @@ -2385,8 +2351,9 @@ symtable_visit_type_param_bound_or_default( { if (e) { int is_in_class = st->st_cur->ste_can_see_class_scope; - if (!symtable_enter_block(st, name, TypeVariableBlock, key, LOCATION(e))) + if (!symtable_enter_block(st, name, TypeVariableBlock, key, LOCATION(e))) { return 0; + } st->st_cur->ste_can_see_class_scope = is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(e))) { @@ -2519,7 +2486,7 @@ symtable_implicit_arg(struct symtable *st, int pos) PyObject *id = PyUnicode_FromFormat(".%d", pos); if (id == NULL) return 0; - if (!symtable_add_def(st, id, DEF_PARAM, ST_LOCATION(st->st_cur))) { + if (!symtable_add_def(st, id, DEF_PARAM, st->st_cur->ste_loc)) { Py_DECREF(id); return 0; } @@ -2740,14 +2707,8 @@ symtable_visit_alias(struct symtable *st, alias_ty a) } else { if (st->st_cur->ste_type != ModuleBlock) { - int lineno = a->lineno; - int col_offset = a->col_offset; - int end_lineno = a->end_lineno; - int end_col_offset = a->end_col_offset; PyErr_SetString(PyExc_SyntaxError, IMPORT_STAR_WARNING); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(a)); Py_DECREF(store_name); return 0; } @@ -2796,9 +2757,7 @@ symtable_handle_comprehension(struct symtable *st, expr_ty e, st->st_cur->ste_comp_iter_expr--; /* Create comprehension scope for the rest */ if (!scope_name || - !symtable_enter_block(st, scope_name, FunctionBlock, (void *)e, - e->lineno, e->col_offset, - e->end_lineno, e->end_col_offset)) { + !symtable_enter_block(st, scope_name, FunctionBlock, (void *)e, LOCATION(e))) { return 0; } switch(e->kind) { @@ -2902,11 +2861,7 @@ symtable_raise_if_annotation_block(struct symtable *st, const char *name, expr_t else return 1; - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + 
SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); return 0; } @@ -2918,12 +2873,20 @@ symtable_raise_if_comprehension_block(struct symtable *st, expr_ty e) { (type == SetComprehension) ? "'yield' inside set comprehension" : (type == DictComprehension) ? "'yield' inside dict comprehension" : "'yield' inside generator expression"); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, e->col_offset + 1, - e->end_lineno, e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } +static int +symtable_raise_if_not_coroutine(struct symtable *st, const char *msg, _Py_SourceLocation loc) { + if (!st->st_cur->ste_coroutine) { + PyErr_SetString(PyExc_SyntaxError, msg); + SET_ERROR_LOCATION(st->st_filename, loc); + return 0; + } + return 1; +} + struct symtable * _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, int start, PyCompilerFlags *flags) diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index fee7dd0e56d96d..e58b60ddd5e484 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -838,7 +838,7 @@ _PyTraceMalloc_Init(void) tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, hashtable_compare_traceback, - NULL, raw_free); + raw_free, NULL); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index f92560bd2b76b3..ec365bad3992d5 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -106,13 +106,15 @@ class StackItem: def __str__(self) -> str: cond = f" if ({self.condition})" if self.condition else "" - size = f"[{self.size}]" if self.size != "1" else "" + size = f"[{self.size}]" if self.size else "" type = "" if self.type is None else f"{self.type} " return f"{type}{self.name}{size}{cond} {self.peek}" def is_array(self) -> bool: - return self.type == "_PyStackRef *" + return self.size != "" + def get_size(self) -> str: + return self.size if self.size else "1" @dataclass class StackEffect: @@ -293,7 +295,7 @@ def convert_stack_item(item: parser.StackEffect, replace_op_arg_1: str | None) - if replace_op_arg_1 and OPARG_AND_1.match(item.cond): cond = replace_op_arg_1 return StackItem( - item.name, item.type, cond, (item.size or "1") + item.name, item.type, cond, item.size ) def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None) -> StackEffect: diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index e4e0c9b658c19d..9314bb9e79687f 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -5,9 +5,10 @@ Instruction, Uop, Properties, + StackItem, ) from cwriter import CWriter -from typing import Callable, Mapping, TextIO, Iterator +from typing import Callable, Mapping, TextIO, Iterator, Tuple from lexer import Token from stack import Stack @@ -24,6 +25,15 @@ def root_relative_path(filename: str) -> str: return filename +def type_and_null(var: StackItem) -> Tuple[str, str]: + if var.type: + return var.type, "NULL" + elif var.is_array(): + return "_PyStackRef *", "NULL" + else: + return "_PyStackRef", "PyStackRef_NULL" + + def write_header( generator: str, sources: list[str], outfile: TextIO, comment: str = "//" ) -> None: @@ -126,7 +136,7 @@ def replace_decrefs( for var in uop.stack.inputs: if var.name == "unused" or var.name == "null" or var.peek: continue - if var.size != "1": + if var.size: 
out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n") out.emit(f"PyStackRef_CLOSE({var.name}[_i]);\n") out.emit("}\n") diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index 0bd4229e2beaf2..8957838f7a90a1 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -285,7 +285,6 @@ def stack_effect(self) -> StackEffect | None: if not (size := self.expression()): raise self.make_syntax_error("Expected expression") self.require(lx.RBRACKET) - type_text = "_PyStackRef *" size_text = size.text.strip() return StackEffect(tkn.text, type_text, cond_text, size_text) return None diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index c0e1278e519143..ebe62df537f15f 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -28,14 +28,15 @@ def var_size(var: StackItem) -> str: if var.condition == "0": return "0" elif var.condition == "1": - return var.size - elif var.condition == "oparg & 1" and var.size == "1": + return var.get_size() + elif var.condition == "oparg & 1" and not var.size: return f"({var.condition})" else: - return f"(({var.condition}) ? {var.size} : 0)" - else: + return f"(({var.condition}) ? {var.get_size()} : 0)" + elif var.size: return var.size - + else: + return "1" @dataclass class StackOffset: diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index c9dce1d5f1804e..85be673b1c396c 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -13,12 +13,14 @@ analyze_files, Skip, analysis_error, + StackItem, ) from generators_common import ( DEFAULT_INPUT, ROOT, write_header, emit_tokens, + type_and_null, ) from cwriter import CWriter from typing import TextIO @@ -38,19 +40,16 @@ def declare_variables(inst: Instruction, out: CWriter) -> None: for var in reversed(uop.stack.inputs): if var.name not in variables: variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") else: - if var.is_array(): - out.emit(f"{var.type}{space}{var.name};\n") - else: - out.emit(f"{type}{space}{var.name};\n") + out.emit(f"{type}{space}{var.name};\n") for var in uop.stack.outputs: if var.name not in variables: variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index f3769bd31c295d..7a69aa6e121fa7 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -20,6 +20,7 @@ emit_tokens, emit_to, REPLACEMENT_FUNCTIONS, + type_and_null, ) from cwriter import CWriter from typing import TextIO, Iterator @@ -35,7 +36,7 @@ def declare_variable( if var.name in variables: return variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt index 534a0cedb743dd..0955387dfb8370 100644 --- 
a/Tools/tsan/suppressions_free_threading.txt +++ b/Tools/tsan/suppressions_free_threading.txt @@ -23,25 +23,12 @@ race:free_threadstate # These warnings trigger directly in a CPython function. -race_top:_add_to_weak_set -race_top:_in_weak_set race_top:_PyEval_EvalFrameDefault race_top:assign_version_tag -race_top:insertdict -race_top:lookup_tp_dict race_top:new_reference -# https://gist.github.com/colesbury/d13d033f413b4ad07929d044bed86c35 -race_top:set_discard_entry -race_top:_PyDict_CheckConsistency -race_top:_Py_dict_lookup_threadsafe race_top:_multiprocessing_SemLock_acquire_impl -race_top:dictiter_new -race_top:dictresize -race_top:insert_to_emptydict -race_top:insertdict race_top:list_get_item_ref race_top:make_pending_calls -race_top:set_add_entry race_top:_Py_slot_tp_getattr_hook race_top:add_threadstate race_top:dump_traceback diff --git a/Tools/wasm/wasi.py b/Tools/wasm/wasi.py index f69299fd662806..a14f58bdac0cb2 100644 --- a/Tools/wasm/wasi.py +++ b/Tools/wasm/wasi.py @@ -26,6 +26,9 @@ LOCAL_SETUP = CHECKOUT / "Modules" / "Setup.local" LOCAL_SETUP_MARKER = "# Generated by Tools/wasm/wasi.py\n".encode("utf-8") +WASMTIME_VAR_NAME = "WASMTIME" +WASMTIME_HOST_RUNNER_VAR = f"{{{WASMTIME_VAR_NAME}}}" + def updated_env(updates={}): """Create a new dict representing the environment to use. @@ -215,11 +218,20 @@ def configure_wasi_python(context, working_dir): # Use PYTHONPATH to include sysconfig data which must be anchored to the # WASI guest's `/` directory. - host_runner = context.host_runner.format(GUEST_DIR="/", - HOST_DIR=CHECKOUT, - ENV_VAR_NAME="PYTHONPATH", - ENV_VAR_VALUE=f"/{sysconfig_data}", - PYTHON_WASM=working_dir / "python.wasm") + args = {"GUEST_DIR": "/", + "HOST_DIR": CHECKOUT, + "ENV_VAR_NAME": "PYTHONPATH", + "ENV_VAR_VALUE": f"/{sysconfig_data}", + "PYTHON_WASM": working_dir / "python.wasm"} + # Check dynamically for wasmtime in case it was specified manually via + # `--host-runner`. + if WASMTIME_HOST_RUNNER_VAR in context.host_runner: + if wasmtime := shutil.which("wasmtime"): + args[WASMTIME_VAR_NAME] = wasmtime + else: + raise FileNotFoundError("wasmtime not found; download from " + "https://github.com/bytecodealliance/wasmtime") + host_runner = context.host_runner.format_map(args) env_additions = {"CONFIG_SITE": config_site, "HOSTRUNNER": host_runner} build_python = os.fsdecode(build_python_path()) # The path to `configure` MUST be relative, else `python.wasm` is unable @@ -277,7 +289,7 @@ def clean_contents(context): def main(): - default_host_runner = (f"{shutil.which('wasmtime')} run " + default_host_runner = (f"{WASMTIME_HOST_RUNNER_VAR} run " # Make sure the stack size will work for a pydebug # build. # Use 16 MiB stack. 
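Returning to the cases-generator changes a few hunks up: with `size` now left empty for scalar stack items, `type_and_null()` and `var_size()` decide between scalar and array declarations, and the decref loop only fires for true arrays. Illustratively (this is the shape of the emitted C, not an excerpt from the generated files), a uop with one scalar input and one array input comes out as:

_PyStackRef value;       /* scalar item: empty size, default PyStackRef_NULL */
_PyStackRef *values;     /* array item: non-empty size, default plain NULL   */

/* array inputs are closed element by element on exit: */
for (int _i = oparg; --_i >= 0;) {
    PyStackRef_CLOSE(values[_i]);
}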
diff --git a/configure b/configure index 922d33edc00cb5..bbfa805883cac5 100755 --- a/configure +++ b/configure @@ -795,8 +795,6 @@ MODULE__POSIXSUBPROCESS_FALSE MODULE__POSIXSUBPROCESS_TRUE MODULE__PICKLE_FALSE MODULE__PICKLE_TRUE -MODULE__OPCODE_FALSE -MODULE__OPCODE_TRUE MODULE__LSPROF_FALSE MODULE__LSPROF_TRUE MODULE__JSON_FALSE @@ -13292,8 +13290,6 @@ case $PLATFORM_TRIPLET in #( perf_trampoline=yes ;; #( aarch64-linux-gnu) : perf_trampoline=yes ;; #( - riscv64-linux-gnu) : - perf_trampoline=yes ;; #( *) : perf_trampoline=no ;; @@ -29233,28 +29229,6 @@ then : -fi - - - if test "$py_cv_module__opcode" != "n/a" -then : - py_cv_module__opcode=yes -fi - if test "$py_cv_module__opcode" = yes; then - MODULE__OPCODE_TRUE= - MODULE__OPCODE_FALSE='#' -else - MODULE__OPCODE_TRUE='#' - MODULE__OPCODE_FALSE= -fi - - as_fn_append MODULE_BLOCK "MODULE__OPCODE_STATE=$py_cv_module__opcode$as_nl" - if test "x$py_cv_module__opcode" = xyes -then : - - - - fi @@ -31786,10 +31760,6 @@ if test -z "${MODULE__LSPROF_TRUE}" && test -z "${MODULE__LSPROF_FALSE}"; then as_fn_error $? "conditional \"MODULE__LSPROF\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi -if test -z "${MODULE__OPCODE_TRUE}" && test -z "${MODULE__OPCODE_FALSE}"; then - as_fn_error $? "conditional \"MODULE__OPCODE\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi if test -z "${MODULE__PICKLE_TRUE}" && test -z "${MODULE__PICKLE_FALSE}"; then as_fn_error $? "conditional \"MODULE__PICKLE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 diff --git a/configure.ac b/configure.ac index a70e673623de81..87c4df20818808 100644 --- a/configure.ac +++ b/configure.ac @@ -3709,7 +3709,6 @@ AC_MSG_CHECKING([perf trampoline]) AS_CASE([$PLATFORM_TRIPLET], [x86_64-linux-gnu], [perf_trampoline=yes], [aarch64-linux-gnu], [perf_trampoline=yes], - [riscv64-linux-gnu], [perf_trampoline=yes], [perf_trampoline=no] ) AC_MSG_RESULT([$perf_trampoline]) @@ -7690,7 +7689,6 @@ PY_STDLIB_MOD_SIMPLE([_csv]) PY_STDLIB_MOD_SIMPLE([_heapq]) PY_STDLIB_MOD_SIMPLE([_json]) PY_STDLIB_MOD_SIMPLE([_lsprof]) -PY_STDLIB_MOD_SIMPLE([_opcode]) PY_STDLIB_MOD_SIMPLE([_pickle]) PY_STDLIB_MOD_SIMPLE([_posixsubprocess]) PY_STDLIB_MOD_SIMPLE([_queue])